WIP
Parent: d1cebaca1a
Commit: 128ad5ee1f
@@ -28,9 +28,6 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
const projects: ProjectSummary[] = results.map(result => {
// Count workitems
const workitemsProcessed = result.processedWorkitems.length;
const workitemsSkipped = result.processedWorkitems.filter(w => w.success && w.status === "skip").length;
const workitemsUpdated = result.processedWorkitems.filter(w => w.success && w.status === "update").length;
const workitemsCreated = result.processedWorkitems.filter(w => w.success && w.status === 'create').length;
const filesWritten = result.processedWorkitems.reduce((sum, w) => sum + (w.filesWritten?.length || 0), 0);

return {
@@ -38,9 +35,6 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
success: !result.error,
error: result.error,
workitemsProcessed,
workitemsSkipped,
workitemsUpdated,
workitemsCreated,
filesWritten,
pullRequestUrl: result.pullRequestUrl,
};
@@ -22,7 +22,6 @@ describe('formatHttpResponse', () => {
isActive: true
},
success: true,
status: 'update'
},
{
workitem: {
@@ -33,7 +32,6 @@ describe('formatHttpResponse', () => {
isActive: false
},
success: true,
status: 'update'
}
],
pullRequestUrl: 'https://github.com/org/project1/pull/123'
@@ -71,8 +71,8 @@ describe('ProcessorService', () => {
{
project,
processedWorkitems: [
{workitem: workitem1, success: true, status: 'update', filesWritten: []},
{workitem: workitem2, success: true, status: 'update', filesWritten: []}
{workitem: workitem1, success: true, filesWritten: []},
{workitem: workitem2, success: true, filesWritten: []}
],
pullRequestUrl: 'https://github.com/org/test-project/pull/123',
gitPatch: 'mock-git-patch'
@@ -147,8 +147,8 @@ describe('ProcessorService', () => {
{
project,
processedWorkitems: [
{workitem: activeWorkitem, success: true, status: 'update', filesWritten: []},
{workitem: deactivatedWorkitem, success: true, status: 'skip', filesWritten: []}
{workitem: activeWorkitem, success: true, filesWritten: []},
{workitem: deactivatedWorkitem, success: true, filesWritten: []}
],
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
}
@@ -1,359 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
import { ProjectService } from '../project-service';
import { WorkitemImplementationStatus } from '../../types';

// Mock fs and path modules
jest.mock('fs');
jest.mock('path');

describe('ProjectService - Log Append Feature', () => {
let projectService: ProjectService;
const mockTimestamp = '2023-01-01T12:00:00.000Z';

beforeEach(() => {
projectService = new ProjectService();

// Reset all mocks
jest.resetAllMocks();

// Mock path.join to return predictable paths
(path.join as jest.Mock).mockImplementation((...args) => args.join('/'));

// Mock Date.toISOString to return a fixed timestamp
jest.spyOn(Date.prototype, 'toISOString').mockReturnValue(mockTimestamp);
});

afterEach(() => {
jest.restoreAllMocks();
});

describe('updateWorkitemWithImplementationLog', () => {
it('should append logs to existing Log section', async () => {
const workitemContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

### Log

Some existing log content.
`;

const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};

const status: WorkitemImplementationStatus = 'create';
const filesWritten = ['file1.ts', 'file2.ts'];
const filesRemoved: string[] = [];

// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);

// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);

// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});

await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);

// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');

// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);

// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];

// Verify the complete content equality
const expectedContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

### Log

${mockTimestamp} - Workitem has been implemented.
- Created file1.ts
- Created file2.ts


Some existing log content.
`;
expect(actualContentFromMock).toEqual(expectedContent);
});

it('should add Log section if it does not exist', async () => {
const workitemContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

`;

const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};

const status: WorkitemImplementationStatus = 'update';
const filesWritten = ['file1.ts', 'file2.ts'];
const filesRemoved: string[] = [];

// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);

// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);

// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});

await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);

// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');

// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);

// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];

// Verify the complete content equality
const expectedContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active


### Log

${mockTimestamp} - Workitem has been updated.
- Created file1.ts
- Created file2.ts
`;
expect(actualContentFromMock).toEqual(expectedContent);
});

it('should handle different status types', async () => {
const workitemContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

### Log

Some existing log content.
`;

const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};

const status: WorkitemImplementationStatus = 'delete';
const filesWritten: string[] = [];
const filesRemoved = ['file1.ts', 'file2.ts'];

// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);

// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);

// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});

await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);

// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');

// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);

// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];

// Verify the complete content equality
const expectedContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

### Log

${mockTimestamp} - Workitem has been deleted.
- Removed file1.ts
- Removed file2.ts


Some existing log content.
`;
expect(actualContentFromMock).toEqual(expectedContent);
});

it('should handle empty files array', async () => {
const workitemContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

### Log

Some existing log content.
`;

const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};

const status: WorkitemImplementationStatus = 'create';
const filesWritten: string[] = [];
const filesRemoved: string[] = [];

// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);

// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);

// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});

await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);

// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');

// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);

// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];

// Verify the complete content equality
const expectedContent = `## Workitem Title

This is a description of the workitem.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active

### Log

${mockTimestamp} - Workitem has been implemented.


Some existing log content.
`;
expect(actualContentFromMock).toEqual(expectedContent);
});

it('should throw error if workitem file does not exist', async () => {
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};

const status: WorkitemImplementationStatus = 'create';
const filesWritten = ['file1.ts', 'file2.ts'];
const filesRemoved: string[] = [];

// Mock fs.existsSync to return false for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(false);

await expect(projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved))
.rejects.toThrow('Workitem file not found: path/to/workitem.md');

expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).not.toHaveBeenCalled();
expect(fs.writeFileSync).not.toHaveBeenCalled();
});
});
});
@@ -290,7 +290,7 @@ export class ProcessorService {

// Generate PR description using Gemini
const workItemsSummary = result.processedWorkitems
.map(item => `${item.workitem.name}: ${item.status} (${item.filesWritten?.length ?? 0} written, ${item.filesRemoved?.length ?? 0} removed)`)
.map(item => `${item.workitem.name}: ${item.filesWritten?.length ?? 0} written, ${item.filesRemoved?.length ?? 0} removed`)
.reduce((acc, item) => `${acc}\n${item}`, '');
const description = await this.geminiService.generatePullRequestDescription(
workItemsSummary,
@@ -5,6 +5,7 @@ import * as fs from 'fs';
import * as path from 'path';
import {Project, ProjectService as SharedProjectService} from 'shared-functions';
import {Workitem, WorkitemImplementationStatus} from '../types';
import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";

export class ProjectService {
private sharedProjectService: SharedProjectService;
@@ -184,9 +185,7 @@ export class ProjectService {
*/
async updateWorkitemWithImplementationLog(
workitem: Workitem,
status: WorkitemImplementationStatus,
filesWritten: string[] = [],
filesRemoved: string[] = [],
response: GeminiResponse
): Promise<Workitem> {
if (!fs.existsSync(workitem.path)) {
throw new Error(`Workitem file not found: ${workitem.path}`);
@@ -198,32 +197,17 @@ export class ProjectService {

// Format the log message
const timestamp = new Date().toISOString();
let logMessage = `${timestamp} - `;
let logMessage = `${timestamp} - Gemini updates`;

switch (status) {
case 'create':
logMessage += `Workitem has been implemented.\n`;
break;
case 'update':
logMessage += `Workitem has been updated.\n`;
break;
case 'delete':
logMessage += `Workitem has been deleted.\n`;
break;
}

// Add the list of files
if (filesWritten.length > 0) {
for (const file of filesWritten) {
logMessage += `- Created ${file}\n`;
}
}

if (filesRemoved.length > 0) {
for (const file of filesRemoved) {
logMessage += `- Removed ${file}\n`;
}
}
response.stepOutcomes.forEach(outcome => {
logMessage += `\n- ${outcome.decision}: ${outcome.reason}`;
})
response.fileDeleted.forEach(file => {
logMessage += `\n- Delete file ${file}`;
})
response.fileWritten.forEach(file => {
logMessage += `\n- Added file ${file}`;
})

// Add PR URL if available
if (workitem.pullRequestUrl) {
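For context on the rewritten log block: the status-based switch and the Created/Removed file lists are replaced by whatever Gemini reported through reportStepOutcome plus the tracked file operations. A hypothetical resulting entry, illustrative only and not taken from the repository:

```ts
// Hypothetical logMessage built by the new code for one step outcome,
// one deleted file and one written file (file names invented).
const exampleLog = `2023-01-01T12:00:00.000Z - Gemini updates
- update: Aligned the controller with the workitem guidelines
- Delete file src/controllers/legacy-controller.ts
- Added file src/controllers/document-controller.ts`;
```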
@@ -11,6 +11,7 @@ import {
Project,
RepositoryService as SharedRepositoryService,
} from 'shared-functions';
import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";

export class ProjectWorkitemsService {
private projectService: ProjectService;
@@ -116,47 +117,14 @@ export class ProjectWorkitemsService {
relevantFiles
);

const decision = result.decision?.decision ?? 'skip';
// Check status consistency
switch (decision) {
case "skip":
if (result.filesWritten.length > 0) {
throw new Error(`Skip decision with files written: ${result.filesWritten.join(', ')}`);
}
if (result.filesDeleted.length > 0) {
throw new Error(`Skip decision with files deleted: ${result.filesDeleted.join(', ')}`);
}
break;
case "create":
if (result.filesWritten.length === 0) {
throw new Error(`Create decision with no files written`);
}
break;
case "update":
if (result.filesWritten.length === 0) {
throw new Error(`Update decision with no files written`);
}
break;
case "delete":
if (result.filesDeleted.length === 0) {
throw new Error(`Delete decision with no files deleted`);
}
break;
}

const hasChanges = result.fileWritten.length > 0 || result.fileDeleted.length > 0;
// Update the workitem file with implementation log
if (decision !== 'skip') {
if (hasChanges) {
try {
// Determine the log status based on the operation status
const logStatus = decision;

// Update the workitem file with implementation log
await this.projectService.updateWorkitemWithImplementationLog(
workitem,
logStatus,
result.filesWritten,
result.filesDeleted
result
);

console.log(`ProjectWorkitemsService: Updated workitem file with implementation log for ${workitem.name}`);
@@ -165,13 +133,12 @@ export class ProjectWorkitemsService {
}
}

console.log(`ProjectWorkitemsService: Completed processing workitem: ${workitem.name} (Status: ${decision}, Files written: ${result.filesWritten.length})`);
console.log(`ProjectWorkitemsService: Completed processing workitem: ${workitem.name} (Files written: ${result.fileWritten.length})`);
return {
success: true,
status: decision,
workitem,
filesWritten: result.filesWritten,
filesRemoved: result.filesDeleted,
filesWritten: result.fileWritten,
filesRemoved: result.fileDeleted,
};
} catch (error) {
console.error(`Error processing workitem ${workitem.name}:`, error);
@@ -229,26 +196,17 @@ export class ProjectWorkitemsService {
workitemContent: string,
workitemName: string,
relevantFiles: Record<string, string> = {}
): Promise<{
text: string;
decision?: { decision: 'create' | 'update' | 'delete' | 'skip'; reason: string };
filesWritten: string[];
filesDeleted: string[];
}> {
): Promise<GeminiResponse> {
const currentDate = new Date().toISOString();

// If dry run is enabled, return a mock feature file
if (DRY_RUN_SKIP_GEMINI) {
console.log(`[DRY RUN] Skipping Gemini API call for generating feature file for ${workitemName}`);
const mockText = `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)`;
return {
text: mockText,
decision: {
decision: 'create',
reason: 'This is a mock decision for dry run mode'
},
filesWritten: [],
filesDeleted: []
fileWritten: [],
fileDeleted: [],
stepOutcomes: [],
modelResponses: []
};
}

@@ -284,11 +242,6 @@ export class ProjectWorkitemsService {
projectRepoPath
);

return {
text: result.text,
decision: result.decision,
filesWritten: result.filesWritten,
filesDeleted: result.filesDeleted
};
return result;
}
}
@@ -31,7 +31,6 @@ export interface ProcessedWorkItem {
workitem: Workitem;
success: boolean;
error?: string;
status?: 'create' | 'update' | 'delete' | 'skip';
filesWritten?: string[];
filesRemoved?: string[];
}
@@ -65,9 +64,6 @@ export interface ProjectSummary {
success: boolean;
error?: string;
workitemsProcessed: number;
workitemsSkipped: number;
workitemsUpdated: number;
workitemsCreated: number;
filesWritten: number;
pullRequestUrl?: string;
gitPatch?: string;
@@ -0,0 +1,358 @@
import * as fs from 'fs';
import * as path from 'path';
import { GeminiFileSystemService } from '../gemini-file-system-service';

// Mock fs and path modules
jest.mock('fs');
jest.mock('path');

describe('GeminiFileSystemService', () => {
let service: GeminiFileSystemService;
const mockProjectId = 'test-project-id';

beforeEach(() => {
service = new GeminiFileSystemService(mockProjectId);

// Reset all mocks
jest.resetAllMocks();

// Mock path.join to return predictable paths
(path.join as jest.Mock).mockImplementation((...args) => args.join('/'));

// Mock path.relative to return predictable relative paths
(path.relative as jest.Mock).mockImplementation((from, to) => {
return to.replace(`${from}/`, '');
});
});

describe('grepFiles', () => {
it('should throw an error if search string is not provided', () => {
expect(() => {
service.grepFiles('/root', '');
}).toThrow('Search string is required');
});

it('should search for a string in files', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/file2.ts': 'const z = 3;\nconst searchTerm = "not found";\nconst w = 4;',
'/root/subdir/file3.ts': 'const a = 5;\nconst searchTerm = "found";\nconst b = 6;',
};

// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.ts', isDirectory: () => false, isFile: () => true },
{ name: 'subdir', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/subdir') {
return [
{ name: 'file3.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});

// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});

const results = service.grepFiles('/root', 'found');

// The implementation matches substrings, so "not found" also matches
expect(results).toHaveLength(3);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
expect(results[1]).toEqual({
file: 'file2.ts',
line: 2,
content: 'const searchTerm = "not found";'
});
expect(results[2]).toEqual({
file: 'subdir/file3.ts',
line: 2,
content: 'const searchTerm = "found";'
});
});

it('should search for a string with wildcard', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/file2.ts': 'const z = 3;\nconst searchTerm = "not found";\nconst w = 4;',
'/root/file3.ts': 'const a = 5;\nconst searchPrefix = "prefound";\nconst b = 6;',
};

// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file3.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});

// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});

const results = service.grepFiles('/root', '*found*');

expect(results).toHaveLength(3);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
expect(results[1]).toEqual({
file: 'file2.ts',
line: 2,
content: 'const searchTerm = "not found";'
});
expect(results[2]).toEqual({
file: 'file3.ts',
line: 2,
content: 'const searchPrefix = "prefound";'
});
});

it('should filter files by pattern', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/file2.js': 'const z = 3;\nconst searchTerm = "found";\nconst w = 4;',
'/root/subdir/file3.ts': 'const a = 5;\nconst searchTerm = "found";\nconst b = 6;',
};

// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.js', isDirectory: () => false, isFile: () => true },
{ name: 'subdir', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/subdir') {
return [
{ name: 'file3.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});

// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});

// Mock matchesPattern to use the actual implementation
jest.spyOn(service as any, 'matchesPattern').mockImplementation((...args: unknown[]) => {
// Simple implementation for testing
const filename = args[0] as string;
const pattern = args[1] as string;
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
const regex = new RegExp(`^${regexPattern}$`);
return regex.test(filename);
});

const results = service.grepFiles('/root', 'found', '*.ts');

expect(results).toHaveLength(2);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
expect(results[1]).toEqual({
file: 'subdir/file3.ts',
line: 2,
content: 'const searchTerm = "found";'
});
});

it('should skip node_modules and .git directories', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/node_modules/file2.ts': 'const z = 3;\nconst searchTerm = "found";\nconst w = 4;',
'/root/.git/file3.ts': 'const a = 5;\nconst searchTerm = "found";\nconst b = 6;',
};

// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'node_modules', isDirectory: () => true, isFile: () => false },
{ name: '.git', isDirectory: () => true, isFile: () => false },
];
}
return [];
});

// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});

const results = service.grepFiles('/root', 'found');

expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
});

it('should handle file read errors gracefully', () => {
// Mock directory structure
(fs.readdirSync as jest.Mock).mockImplementation((dirPath, options) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});

// Mock fs.readFileSync to throw an error for one file
(fs.readFileSync as jest.Mock).mockImplementation((filePath, encoding) => {
if (filePath === '/root/file1.ts') {
return 'const searchTerm = "found";';
} else if (filePath === '/root/file2.ts') {
throw new Error('File read error');
}
return '';
});

const results = service.grepFiles('/root', 'found');

// Should still return results from the file that could be read
expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 1,
content: 'const searchTerm = "found";'
});
});

it('should match "Ws*Document*Controller" with "WsCustomerDocumentController"', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/controller.ts': 'import { WsCustomerDocumentController } from "./controllers";',
};

// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'controller.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});

// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});

const results = service.grepFiles('/root', 'Ws*Document*Controller');

expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'controller.ts',
line: 1,
content: 'import { WsCustomerDocumentController } from "./controllers";'
});
});

it('should match "class Ws*Document*Controller" with filePattern "nitro-domain-api/src/main/java/**"', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/nitro-domain-api/src/main/java/be/test/WsCustomerDocumentController.java': 'package be.test;\n\npublic class WsCustomerDocumentController {\n // Class implementation\n}',
'/root/some-other-path/SomeOtherFile.java': 'package some.other.path;\n\npublic class WsCustomerDocumentController {\n // Should not match due to file pattern\n}',
};

// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'nitro-domain-api', isDirectory: () => true, isFile: () => false },
{ name: 'some-other-path', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api') {
return [
{ name: 'src', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src') {
return [
{ name: 'main', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main') {
return [
{ name: 'java', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main/java') {
return [
{ name: 'be', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main/java/be') {
return [
{ name: 'test', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main/java/be/test') {
return [
{ name: 'WsCustomerDocumentController.java', isDirectory: () => false, isFile: () => true },
];
} else if (dirPath === '/root/some-other-path') {
return [
{ name: 'SomeOtherFile.java', isDirectory: () => false, isFile: () => true },
];
}
return [];
});

// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});

// Mock matchesPattern to use the actual implementation
jest.spyOn(service as any, 'matchesPattern').mockImplementation((...args: unknown[]) => {
// Simple implementation for testing
const filename = args[0] as string;
const pattern = args[1] as string;
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
const regex = new RegExp(`^${regexPattern}$`);
return regex.test(filename);
});

const results = service.grepFiles('/root', 'class Ws*Document*Controller', 'nitro-domain-api/src/main/java/**');

expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'nitro-domain-api/src/main/java/be/test/WsCustomerDocumentController.java',
line: 3,
content: 'public class WsCustomerDocumentController {'
});
});
});
});
@@ -4,9 +4,12 @@
import * as fs from 'fs';
import * as path from 'path';
import {
Content,
FunctionCall,
FunctionDeclarationSchemaType,
GenerateContentCandidate,
FunctionResponse,
GenerateContentRequest,
GenerativeModel,
Tool,
VertexAI
} from '@google-cloud/vertexai';
@@ -24,23 +27,14 @@ export interface FunctionArgs {
reason?: string;
}

/**
* Interface for the model response format
*/
export interface ModelResponse {
export interface GeminiResponse {
fileWritten: string[];
fileDeleted: string[];
stepOutcomes: {
decision: 'create' | 'update' | 'delete' | 'skip';
reason: string;
}

/**
* Interface for the result of processing a model stream
*/
export interface ModelStreamResult {
text: string;
decision?: ModelResponse;
}[];
modelResponses: string[];
filesWritten: string[];
filesDeleted: string[];
}

/**
@@ -148,7 +142,7 @@ export class GeminiFileSystemService {
},
filePattern: {
type: FunctionDeclarationSchemaType.STRING,
description: "Optional file pattern to limit the search (e.g., '*.ts', 'src/*.java')"
description: "Optional glob file pattern to limit the search (e.g., '*.ts', 'src/*.java')"
}
},
required: ["searchString"]
@@ -169,14 +163,14 @@ export class GeminiFileSystemService {
}
},
{
name: "reportFinalOutcome",
description: "Submit the final outcome for compliance with guidelines. Can only be called once.",
name: "reportStepOutcome",
description: "Submit the outcome for a step in compliance with guidelines. Can be called multiple times.",
parameters: {
type: FunctionDeclarationSchemaType.OBJECT,
properties: {
outcome: {
type: FunctionDeclarationSchemaType.STRING,
description: "The final outcome: 'create', 'update', 'delete', or 'skip'",
description: "The step outcome: 'create', 'update', 'delete', or 'skip'",
enum: ["create", "update", "delete", "skip"]
},
reason: {
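Because the old and new declarations are interleaved in the hunk above, here is the new response shape read out of this diff in one piece. This is a reconstruction for readability, not an excerpt from the file:

```ts
// Reconstructed from the hunk above: ModelResponse/ModelStreamResult are
// replaced by a single accumulated GeminiResponse.
export interface GeminiResponse {
    fileWritten: string[];
    fileDeleted: string[];
    stepOutcomes: {
        decision: 'create' | 'update' | 'delete' | 'skip';
        reason: string;
    }[];
    modelResponses: string[];
}
```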
@@ -253,24 +247,58 @@ export class GeminiFileSystemService {
}

/**
* List files in a directory
* List files in a directory, optionally with a glob pattern and recursion
* @param rootPath Root path of the filesystem
* @param dirPath Path to the directory relative to the root path
* @returns Array of file names
* @param pattern Optional glob pattern to filter files
* @returns Array of file paths relative to dirPath
*/
listFiles(rootPath: string, dirPath: string): string[] {
console.debug(" - listFiles called with dirPath: " + dirPath);
listFiles(rootPath: string, dirPath: string, pattern?: string): string[] {
console.debug(" - listFiles called with dirPath: " + dirPath + (pattern ? ", pattern: " + pattern : ""));
const fullPath = path.join(rootPath, dirPath);
if (!fs.existsSync(fullPath)) {
throw new Error(`Directory not found: ${dirPath}`);
}
return fs.readdirSync(fullPath);

const results: string[] = [];

// Helper function to recursively list files in a directory
const listFilesInDirectory = (currentPath: string, basePath: string) => {
try {
const entries = fs.readdirSync(currentPath, {withFileTypes: true});

for (const entry of entries) {
const entryPath = path.join(currentPath, entry.name);
const relativePath = path.relative(basePath, entryPath);

if (entry.isDirectory()) {
// If pattern includes ** (recursive glob), recurse into subdirectories
if (pattern && pattern.includes('**')) {
listFilesInDirectory(entryPath, basePath);
}
} else if (entry.isFile()) {
// Check if the file matches the pattern
if (!pattern || this.matchesPattern(relativePath, pattern)) {
results.push(relativePath);
}
}
}
} catch (error) {
// Silently ignore directory read errors
}
};

// Start listing from the specified directory
listFilesInDirectory(fullPath, fullPath);

return results;
}

/**
* Search for a string in files
* @param rootPath Root path to search in
* @param searchString String to search for
* @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java")
* @param searchString String to search for. * can be used for wildcards
* @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java", "src/**")
* @returns Array of matches with file paths and line numbers
* @throws Error if search string is not provided
*/
@@ -292,8 +320,11 @@ export class GeminiFileSystemService {
const content = fs.readFileSync(filePath, 'utf-8');
const lines = content.split('\n');

const pattern = searchString.replace(/\*/g, '.*'); // Convert * to .*
const regex = new RegExp(`.*${pattern}.*`);

for (let i = 0; i < lines.length; i++) {
if (lines[i].includes(searchString)) {
if (regex.test(lines[i])) {
results.push({
file: relativePath,
line: i + 1, // 1-based line numbers
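A note on the wildcard change above: only `*` is rewritten to `.*`, so any other regex metacharacters in the search string pass through unescaped. A small sketch of the conversion with escaping added; the helper name and the escaping step are assumptions for illustration, not part of this commit:

```ts
// Sketch: turn a '*' wildcard search string into a substring-matching regex,
// escaping the remaining metacharacters first (the commit only replaces '*').
function wildcardToRegex(searchString: string): RegExp {
    const escaped = searchString.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
    return new RegExp(escaped.replace(/\*/g, '.*'));
}

wildcardToRegex('Ws*Document*Controller')
    .test('import { WsCustomerDocumentController } from "./controllers";'); // true
```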
@@ -367,22 +398,15 @@ export class GeminiFileSystemService {
guidelines: string,
additionalContent: string,
rootPath: string
): Promise<ModelStreamResult> {
const currentDate = new Date().toISOString();

): Promise<GeminiResponse> {
// If dry run is enabled, return a mock result
if (this.dryRun) {
console.log(`[DRY RUN] Skipping Gemini API call for processing`);
const mockText = `# Generated on ${currentDate} (DRY RUN)`;
return {
text: mockText,
decision: {
decision: 'create',
reason: 'This is a mock decision for dry run mode'
},
stepOutcomes: [],
fileDeleted: [],
modelResponses: [],
filesWritten: [],
filesDeleted: []
fileWritten: []
};
}

@@ -398,18 +422,18 @@ ${additionalContent}

You have access to the following function calls to help you understand the project structure and create implementations:
- getFileContent(filePath): Get the content of a file in the project repository
- writeFileContent(filePath, content): Write content to a file in the project repository
- writeFileContent(filePath, content): Write content to a file in the project repository (create or update)
- fileExists(filePath): Check if a file exists in the project repository
- listFiles(dirPath): List files in a directory in the project repository
- grepFiles(searchString, filePattern): Search for a string in project files, optionally filtered by a file pattern
- grepFiles(searchString, filePattern): Search for a string in project files, optionally filtered by a file pattern (glob)
use filePattern='path/**' to search recursively in all files under path.
- deleteFile(filePath): Delete a file from the project repository

IMPORTANT: First use the function calls above to comply with the guidelines. Create, update, or delete all required files.

Then, once finished with all the guidelines above, use this function once to report the overall outcome:
- reportFinalOutcome(outcome, reason): Outcome must be one of: 'create', 'update', 'delete', 'skip'
You can use this function to report the outcome of each step as you work through the guidelines:
- reportStepOutcome(outcome, reason): Outcome must be one of: 'create', 'update', 'delete', 'skip'

You won't be able to update other files once you've made a decision.
`;

// Instantiate the model with our file operation tools
@@ -417,9 +441,7 @@ You won't be able to update other files once you've made a decision.
model: this.model,
tools: this.fileOperationTools,
generation_config: {
temperature: 0.3, // Very low temperature for more deterministic responses
top_p: 0.8, // Higher top_p to allow more diverse completions when needed
top_k: 60, // Consider only the top 40 tokens
temperature: 0.2, // Very low temperature for more deterministic responses
},
});
@@ -430,199 +452,29 @@ You won't be able to update other files once you've made a decision.
],
tools: this.fileOperationTools,
};
const geminiResponse = await this.handleGeminiStream(generativeModel, request, rootPath);

// Generate content in a streaming fashion
const streamingResp = await generativeModel.generateContentStream(request);
console.debug("--- Gemini response:");
geminiResponse.modelResponses.forEach(r => {
console.debug(r);
})
console.debug("---");

// Track state within the method scope
const filesWritten: string[] = [];
const filesDeleted: string[] = [];
const modelResponses: string[] = [];
let decision: ModelResponse | undefined;
let finalResponse = '';
let pendingFunctionCalls = [];

// Process the streaming response
for await (const item of streamingResp.stream) {
// Check if there's a function call in any part of the response
let functionCall = null;
let textContent = '';

// Iterate over every part in the response
for (const part of item.candidates?.[0]?.content?.parts || []) {
if (part.functionCall) {
functionCall = part.functionCall;
break;
} else if (part.text) {
textContent += part.text;
}
}

if (functionCall) {
pendingFunctionCalls.push(functionCall);
} else if (textContent) {
// If there's text, append it to the final response
finalResponse += textContent;
modelResponses.push(textContent);
console.debug("- received text: " + textContent);
}
}

// Process any function calls that were detected
if (pendingFunctionCalls.length > 0) {
let currentRequest: GenerateContentRequest = request;

// Process each function call
for (const functionCall of pendingFunctionCalls) {
const functionName = functionCall.name;
const functionArgs = (typeof functionCall.args === 'string' ?
JSON.parse(functionCall.args) : functionCall.args) as FunctionArgs;

let functionResponse;
try {
// Execute the function
switch (functionName) {
case 'getFileContent':
functionResponse = this.getFileContent(rootPath, functionArgs.filePath!);
break;
case 'writeFileContent':
this.writeFileContent(rootPath, functionArgs.filePath!, functionArgs.content!);
functionResponse = `File ${functionArgs.filePath} written successfully`;
// Track the file written
filesWritten.push(functionArgs.filePath!);
break;
case 'fileExists':
functionResponse = this.fileExists(rootPath, functionArgs.filePath!);
break;
case 'listFiles':
functionResponse = this.listFiles(rootPath, functionArgs.dirPath!);
break;
case 'grepFiles':
functionResponse = this.grepFiles(rootPath, functionArgs.searchString!, functionArgs.filePattern);
break;
case 'deleteFile':
functionResponse = this.deleteFile(rootPath, functionArgs.filePath!);
// Track the file deleted
filesDeleted.push(functionArgs.filePath!);
break;
case 'reportFinalOutcome':
console.debug(`- received reportFinalOutcome function call: ${functionArgs.outcome} - ${functionArgs.reason}`);
// Store the decision
decision = {
decision: functionArgs.outcome!,
reason: functionArgs.reason!
};
functionResponse = `Outcome recorded: ${functionArgs.outcome} - ${functionArgs.reason}`;
break;
default:
throw new Error(`Unknown function: ${functionName}`);
return geminiResponse;
}

private createFunctionExchangeContents(
functionCall: FunctionCall,
responseData: any,
): Content[] {
// Create a function response object
const functionResponseObj = {
name: functionName,
response: {result: JSON.stringify(functionResponse)}
const functionResponseObj: FunctionResponse = {
name: functionCall.name,
response: {
data: JSON.stringify(responseData),
},
};

// Update the request with the function call and response
currentRequest = this.createNextRequest(currentRequest, functionCall, functionResponseObj);

// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);

// Process the next streaming response
const nextResult = await this.processNextStreamingResponse(nextStreamingResp);

// Update state
finalResponse += nextResult.textContent;
if (nextResult.textContent) {
modelResponses.push(nextResult.textContent);
}
if (nextResult.functionCall) {
if (decision != null) {
console.warn(`Received another function call for ${nextResult.functionCall.name}, but a decision has been recorded. Ignoring stream`);
break;
}
pendingFunctionCalls.push(nextResult.functionCall);
}

} catch (error) {
let errorMessage = error instanceof Error ? error.message : String(error);
console.error(`Error executing function ${functionName}: ${errorMessage}`);

// Create an error response object
const errorResponseObj = {
name: functionName,
response: {error: errorMessage}
};

// Update the request with the function call and error response
currentRequest = this.createNextRequest(currentRequest, functionCall, errorResponseObj, true);

// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);

// Process the next streaming response
const nextResult = await this.processNextStreamingResponse(nextStreamingResp, true);

// Update state
finalResponse += nextResult.textContent;
if (nextResult.textContent) {
modelResponses.push(nextResult.textContent);
}
if (nextResult.functionCall) {
if (decision != null) {
console.warn(`Received another function call for ${nextResult.functionCall.name}, but a decision has been recorded. Ignoring stream`);
break;
}
pendingFunctionCalls.push(nextResult.functionCall);
}
}
}
}

// If no explicit decision was made using the reportFinalOutcome function, try to parse it from the text
if (!decision) {
console.warn(`No decision function call made during the stream session`);
try {
// Try to parse a JSON decision from the text
const jsonMatch = finalResponse.match(/\{[\s\S]*"decision"[\s\S]*\}/);
if (jsonMatch) {
decision = JSON.parse(jsonMatch[0]) as ModelResponse;
}
} catch (error) {
console.error(`Error parsing JSON decision:`, error);
}
}

console.debug(`- Completed gemini stream processing. Final response: ${decision?.decision} - ${decision?.reason}`);

return {
text: finalResponse,
decision: decision ?? {decision: "skip", reason: "No decision received/parsed"},
modelResponses: modelResponses,
filesWritten: filesWritten,
filesDeleted: filesDeleted
};
}

/**
* Create the next request with function call and response
* @param currentRequest Current request
* @param functionCall Function call object
* @param functionResponseObj Function response object
* @param isError Whether the response is an error
* @returns Next request
*/
private createNextRequest(
currentRequest: GenerateContentRequest,
functionCall: any,
functionResponseObj: any,
isError: boolean = false
): GenerateContentRequest {
return {
contents: [
...currentRequest.contents,
return [
{
role: 'ASSISTANT',
parts: [
@ -639,40 +491,133 @@ You won't be able to update other files once you've made a decision.
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
];
|
||||
}
|
||||
|
||||
private processFunctionCall(functionCall: FunctionCall, rootPath: string, callbacks: {
|
||||
onFileWritten: (file: string) => any;
|
||||
onFileDelete: (file: string) => any;
|
||||
onStepOutcome: (outcome: 'create' | 'update' | 'delete' | 'skip', reason: string) => any
|
||||
}): string | string[] | boolean | any {
|
||||
const functionName = functionCall.name;
|
||||
try {
|
||||
const functionArgs = (typeof functionCall.args === 'string' ?
|
||||
JSON.parse(functionCall.args) : functionCall.args) as FunctionArgs;
|
||||
|
||||
let functionResponse: string | string[] | boolean | any;
|
||||
// Execute the function
|
||||
switch (functionName) {
|
||||
case 'getFileContent':
|
||||
functionResponse = this.getFileContent(rootPath, functionArgs.filePath!);
|
||||
break;
|
||||
case 'writeFileContent':
|
||||
this.writeFileContent(rootPath, functionArgs.filePath!, functionArgs.content!);
|
||||
functionResponse = `File ${functionArgs.filePath} written successfully`;
|
||||
// Track the file written
|
||||
callbacks.onFileWritten(functionArgs.filePath!);
|
||||
break;
|
||||
case 'fileExists':
|
||||
functionResponse = this.fileExists(rootPath, functionArgs.filePath!);
|
||||
break;
|
||||
case 'listFiles':
|
||||
functionResponse = this.listFiles(rootPath, functionArgs.dirPath!);
|
||||
break;
|
||||
case 'grepFiles':
|
||||
functionResponse = this.grepFiles(rootPath, functionArgs.searchString!, functionArgs.filePattern);
|
||||
break;
|
||||
case 'deleteFile':
|
||||
functionResponse = this.deleteFile(rootPath, functionArgs.filePath!);
|
||||
// Track the file deleted
|
||||
callbacks.onFileDelete(functionArgs.filePath!);
|
||||
break;
|
||||
case 'reportStepOutcome':
|
||||
console.debug(` - received reportStepOutcome function call: ${functionArgs.outcome} - ${functionArgs.reason}`);
|
||||
callbacks.onStepOutcome(functionArgs.outcome!, functionArgs.reason!);
|
||||
functionResponse = `Step outcome recorded: ${functionArgs.outcome} - ${functionArgs.reason}`;
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unknown function: ${functionName}`);
|
||||
}
|
||||
return functionResponse;
|
||||
} catch (error) {
|
||||
let errorMessage = error instanceof Error ? error.message : String(error);
|
||||
console.error(`Error executing function ${functionName}: ${errorMessage}`);
|
||||
|
||||
return {error: errorMessage};
|
||||
}
|
||||
}

    private async handleGeminiStream(generativeModel: GenerativeModel, request: GenerateContentRequest,
                                     rootPath: string,
                                     geminiResponse: GeminiResponse = {
                                         stepOutcomes: [],
                                         fileDeleted: [],
                                         fileWritten: [],
                                         modelResponses: []
                                     }): Promise<GeminiResponse> {
        // Generate content in a streaming fashion
        const streamGenerateContentResult = await generativeModel.generateContentStream(request);

        const pendingFunctionCalls = [];

        // Process the streaming response
        for await (const item of streamGenerateContentResult.stream) {
            // Iterate over every part in the response
            let generateContentCandidates = item.candidates ?? [];
            if (generateContentCandidates.length === 0) {
                throw new Error(`No candidates found in streaming response`);
            }
            if (generateContentCandidates.length > 1) {
                console.warn(`Multiple (${generateContentCandidates.length}) candidates found in streaming response. Using the first one`);
            }
            const responseCandidate = generateContentCandidates[0];
            const responseParts = responseCandidate.content?.parts || [];

            if (responseParts.length === 0) {
                console.warn(`No parts found in streaming response`);
                return geminiResponse;
            }

            for (const part of responseParts) {
                if (part.functionCall) {
                    const functionCall = part.functionCall;
                    pendingFunctionCalls.push(functionCall);
                } else if (part.text) {
                    const textContent = part.text;
                    geminiResponse.modelResponses.push(textContent);
                } else {
                    console.warn(`Unhandled response part: ${JSON.stringify(part)}`);
                }
            }
        }

        // Process any function calls that were detected
        if (pendingFunctionCalls.length > 0) {
            // TODO: drop old content above 1M tokens
            const updatedRequestContents = [
                ...request.contents,
            ];
            for (const functionCall of pendingFunctionCalls) {
                const responseData = this.processFunctionCall(functionCall, rootPath, {
                    onFileWritten: (f) => geminiResponse.fileWritten.push(f),
                    onFileDelete: (f) => geminiResponse.fileDeleted.push(f),
                    onStepOutcome: (outcome, reason) => geminiResponse.stepOutcomes.push({
                        decision: outcome,
                        reason: reason
                    })
                });
                const contents = this.createFunctionExchangeContents(functionCall, responseData);
                updatedRequestContents.push(...contents);
            }

            // Submit a new request with the function responses and keep streaming
            const updatedRequest: GenerateContentRequest = {
                contents: updatedRequestContents,
                tools: this.fileOperationTools,
            };
            return this.handleGeminiStream(generativeModel, updatedRequest, rootPath, geminiResponse);
        }

        return geminiResponse;
    }

    /**
     * Process the next streaming response
     * @param nextStreamingResp Next streaming response
     * @param isAfterError Whether this is after an error
     * @returns Object containing text content and function call
     */
    private async processNextStreamingResponse(
        nextStreamingResp: any,
        isAfterError: boolean = false
    ): Promise<{
        textContent: string,
        functionCall: any
    }> {
        let textContent = '';
        let functionCall = null;

        for await (const nextItem of nextStreamingResp.stream) {
            // Iterate over every part in the response
            for (const part of nextItem.candidates?.[0]?.content?.parts || []) {
                if (part.functionCall) {
                    functionCall = part.functionCall;
                    break;
                } else if (part.text) {
                    textContent += part.text;
                } else {
                    console.warn(`Unhandled response part: ${JSON.stringify(part)}`);
                }
            }
        }

        return {textContent, functionCall};
    }

}
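A minimal usage sketch of the streaming handler above, as it might be driven from inside the service (the Vertex AI client, model id, prompt, and rootPath are assumed to be in scope; none of them are defined in this file): handleGeminiStream recurses on itself whenever Gemini issues function calls, so the caller only ever sees the fully accumulated GeminiResponse.

const generativeModel = vertexAI.getGenerativeModel({model: 'gemini-1.5-pro'}); // assumed client and model id
const initialRequest: GenerateContentRequest = {
    contents: [{role: 'user', parts: [{text: prompt}]}], // prompt is an assumption
    tools: this.fileOperationTools,
};
const geminiResponse = await this.handleGeminiStream(generativeModel, initialRequest, rootPath);
console.log(`Files written: ${geminiResponse.fileWritten.length}, steps recorded: ${geminiResponse.stepOutcomes.length}`);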
@ -202,6 +202,10 @@ export class ProcessorService {
            console.error(`Failure for project ${project.name}: ${result.error}`);
            return result;
        }
+        if (result.gitPatch == null) {
+            console.warn(`No changes to commit for project ${project.name}`);
+            return result;
+        }

        // Skip creating commits/PRs if dry run is enabled
        if (DRY_RUN_SKIP_COMMITS) {
@ -3,10 +3,12 @@
 */
import * as fs from 'fs';
import * as path from 'path';
-import {ProcessResult, TestSpecImplementationStatus} from '../types';
+import {ProcessResult} from '../types';
import {ProjectService} from './project-service';
import {DRY_RUN_SKIP_GEMINI} from '../config';
import {GeminiFileSystemService, Project, RepositoryService as SharedRepositoryService,} from 'shared-functions';
+import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";
+import {success} from "concurrently/dist/src/defaults";

export class ProjectTestSpecsService {
    private projectService: ProjectService;
@ -37,10 +39,11 @@ export class ProjectTestSpecsService {
            // Generate git patch if any files were written
            let gitPatch: string | undefined = undefined;

-            if ((result.filesWritten?.length ?? 0) > 0) {
+            if ((result.filesWritten?.length ?? 0) > 0 || (result.filesRemoved?.length ?? 0) > 0) {
                try {
                    console.log(`Generating git patch for project ${project.name} with ${result.filesWritten} files written`);
                    gitPatch = await this.sharedRepositoryService.generateGitPatch(projectRepoPath);

                } catch (error) {
                    console.error(`Error generating git patch for project ${project.name}:`, error);
                }
@ -50,6 +53,7 @@ export class ProjectTestSpecsService {
                ...result,
                gitPatch
            };

        } catch (error) {
            console.error(`Error processing project ${project.name}:`, error);
            return {
@ -83,26 +87,12 @@ export class ProjectTestSpecsService {
                relevantFiles
            );

-            // Check status consistency
-            if (result.decision?.decision === 'skip') {
-                if (result.filesWritten.length > 0) {
-                    throw new Error(`Skip decision with files written: ${result.filesWritten.join(', ')}`);
-                }
-                if (result.filesDeleted.length > 0) {
-                    throw new Error(`Skip decision with files deleted: ${result.filesDeleted.join(', ')}`);
-                }
-            } else if (result.decision?.decision === 'create' || result.decision?.decision === 'update') {
-                if (result.filesWritten.length === 0) {
-                    throw new Error(`${result.decision.decision} decision with no files written`);
-                }
-            }
-
-            console.log(`ProjectTestSpecsService: Completed processing project (Status: ${result.decision?.decision}, Files written: ${result.filesWritten.length})`);
+            console.log(`ProjectTestSpecsService: Completed processing project (Files written: ${result.fileWritten.length})`);
            return {
                project: project,
                success: true,
-                filesWritten: result.filesWritten,
-                filesRemoved: result.filesDeleted,
+                filesWritten: result.fileWritten,
+                filesRemoved: result.fileDeleted,
            };
        } catch (error) {
            console.error(`Error processing project ${project.name}:`, error);
@ -158,25 +148,15 @@ export class ProjectTestSpecsService {
        projectRepoPath: string,
        guidelines: string,
        relevantFiles: Record<string, string> = {}
-    ): Promise<{
-        text: string;
-        decision?: { decision: TestSpecImplementationStatus; reason: string };
-        filesWritten: string[];
-        filesDeleted: string[];
-    }> {
-        const currentDate = new Date().toISOString();
-
+    ): Promise<GeminiResponse> {
        // If dry run is enabled, return a mock implementation
        if (DRY_RUN_SKIP_GEMINI) {
            const mockText = `# Generated by test-spec-to-test-implementation on ${currentDate} (DRY RUN)`;
            console.warn(`[DRY RUN] Skipping Gemini API call for processing`);
            return {
-                text: mockText,
-                decision: {
-                    decision: 'create',
-                    reason: 'This is a mock decision for dry run mode'
-                },
-                filesWritten: [],
-                filesDeleted: []
+                modelResponses: [],
+                stepOutcomes: [],
+                fileDeleted: [],
+                fileWritten: []
            };
        }
@ -204,11 +184,6 @@ export class ProjectTestSpecsService {
            projectRepoPath
        );

-        return {
-            text: result.text,
-            decision: result.decision as { decision: TestSpecImplementationStatus; reason: string },
-            filesWritten: result.filesWritten,
-            filesDeleted: result.filesDeleted
-        };
+        return result;
    }
}
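For reference, the GeminiResponse shape these hunks now return directly can be read off the default parameter of handleGeminiStream and the callbacks that populate it; a sketch, with nothing assumed beyond what that code shows:

interface GeminiResponse {
    stepOutcomes: { decision: 'create' | 'update' | 'delete' | 'skip'; reason: string }[]; // from reportStepOutcome calls
    fileWritten: string[];    // paths passed to onFileWritten
    fileDeleted: string[];    // paths passed to onFileDelete
    modelResponses: string[]; // plain text parts streamed back by the model
}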