This commit is contained in:
cghislai 2025-06-08 15:03:16 +02:00
parent d1cebaca1a
commit 128ad5ee1f
12 changed files with 761 additions and 913 deletions

View File

@ -28,9 +28,6 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
const projects: ProjectSummary[] = results.map(result => {
// Count workitems
const workitemsProcessed = result.processedWorkitems.length;
- const workitemsSkipped = result.processedWorkitems.filter(w => w.success && w.status === "skip").length;
- const workitemsUpdated = result.processedWorkitems.filter(w => w.success && w.status === "update").length;
- const workitemsCreated = result.processedWorkitems.filter(w => w.success && w.status === 'create').length;
const filesWritten = result.processedWorkitems.reduce((sum, w) => sum + (w.filesWritten?.length || 0), 0);
return {
@ -38,9 +35,6 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
success: !result.error,
error: result.error,
workitemsProcessed,
- workitemsSkipped,
- workitemsUpdated,
- workitemsCreated,
filesWritten,
pullRequestUrl: result.pullRequestUrl,
};
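For reference, a minimal sketch of the trimmed-down mapping after this change, assuming only the fields visible in the hunks above (any other ProjectSummary fields are omitted here):

// Sketch only: the per-status counters are gone; the summary is derived directly from processedWorkitems.
const projects = results.map(result => ({
    success: !result.error,
    error: result.error,
    workitemsProcessed: result.processedWorkitems.length,
    filesWritten: result.processedWorkitems.reduce((sum, w) => sum + (w.filesWritten?.length || 0), 0),
    pullRequestUrl: result.pullRequestUrl,
}));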

View File

@ -1,5 +1,5 @@
- import { formatHttpResponse } from '../../index';
- import { ProcessResult, HttpResponse } from '../../types';
+ import {formatHttpResponse} from '../../index';
+ import {ProcessResult, HttpResponse} from '../../types';
describe('formatHttpResponse', () => {
it('should format process results into a concise HTTP response', () => {
@ -22,7 +22,6 @@ describe('formatHttpResponse', () => {
isActive: true
},
success: true,
- status: 'update'
},
{
workitem: {
@ -33,7 +32,6 @@ describe('formatHttpResponse', () => {
isActive: false
},
success: true,
- status: 'update'
}
],
pullRequestUrl: 'https://github.com/org/project1/pull/123'

View File

@ -71,8 +71,8 @@ describe('ProcessorService', () => {
{
project,
processedWorkitems: [
- {workitem: workitem1, success: true, status: 'update', filesWritten: []},
- {workitem: workitem2, success: true, status: 'update', filesWritten: []}
+ {workitem: workitem1, success: true, filesWritten: []},
+ {workitem: workitem2, success: true, filesWritten: []}
],
pullRequestUrl: 'https://github.com/org/test-project/pull/123',
gitPatch: 'mock-git-patch'
@ -147,8 +147,8 @@ describe('ProcessorService', () => {
{
project,
processedWorkitems: [
- {workitem: activeWorkitem, success: true, status: 'update', filesWritten: []},
- {workitem: deactivatedWorkitem, success: true, status: 'skip', filesWritten: []}
+ {workitem: activeWorkitem, success: true, filesWritten: []},
+ {workitem: deactivatedWorkitem, success: true, filesWritten: []}
],
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
}

View File

@ -1,359 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
import { ProjectService } from '../project-service';
import { WorkitemImplementationStatus } from '../../types';
// Mock fs and path modules
jest.mock('fs');
jest.mock('path');
describe('ProjectService - Log Append Feature', () => {
let projectService: ProjectService;
const mockTimestamp = '2023-01-01T12:00:00.000Z';
beforeEach(() => {
projectService = new ProjectService();
// Reset all mocks
jest.resetAllMocks();
// Mock path.join to return predictable paths
(path.join as jest.Mock).mockImplementation((...args) => args.join('/'));
// Mock Date.toISOString to return a fixed timestamp
jest.spyOn(Date.prototype, 'toISOString').mockReturnValue(mockTimestamp);
});
afterEach(() => {
jest.restoreAllMocks();
});
describe('updateWorkitemWithImplementationLog', () => {
it('should append logs to existing Log section', async () => {
const workitemContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
Some existing log content.
`;
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const status: WorkitemImplementationStatus = 'create';
const filesWritten = ['file1.ts', 'file2.ts'];
const filesRemoved: string[] = [];
// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);
// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);
// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});
await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);
// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];
// Verify the complete content equality
const expectedContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
${mockTimestamp} - Workitem has been implemented.
- Created file1.ts
- Created file2.ts
Some existing log content.
`;
expect(actualContentFromMock).toEqual(expectedContent);
});
it('should add Log section if it does not exist', async () => {
const workitemContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
`;
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const status: WorkitemImplementationStatus = 'update';
const filesWritten = ['file1.ts', 'file2.ts'];
const filesRemoved: string[] = [];
// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);
// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);
// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});
await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);
// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];
// Verify the complete content equality
const expectedContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
${mockTimestamp} - Workitem has been updated.
- Created file1.ts
- Created file2.ts
`;
expect(actualContentFromMock).toEqual(expectedContent);
});
it('should handle different status types', async () => {
const workitemContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
Some existing log content.
`;
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const status: WorkitemImplementationStatus = 'delete';
const filesWritten: string[] = [];
const filesRemoved = ['file1.ts', 'file2.ts'];
// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);
// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);
// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});
await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);
// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];
// Verify the complete content equality
const expectedContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
${mockTimestamp} - Workitem has been deleted.
- Removed file1.ts
- Removed file2.ts
Some existing log content.
`;
expect(actualContentFromMock).toEqual(expectedContent);
});
it('should handle empty files array', async () => {
const workitemContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
Some existing log content.
`;
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const status: WorkitemImplementationStatus = 'create';
const filesWritten: string[] = [];
const filesRemoved: string[] = [];
// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(true);
// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValue(workitemContent);
// Mock fs.writeFileSync to capture the actual output
let actualContent = '';
(fs.writeFileSync as jest.Mock).mockImplementation((path, content) => {
actualContent = content;
});
await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
// Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
// Verify that fs.writeFileSync was called with the path
expect(fs.writeFileSync).toHaveBeenCalledWith(
'path/to/workitem.md',
expect.any(String),
'utf-8'
);
// Get the actual content from the mock
const actualContentFromMock = (fs.writeFileSync as jest.Mock).mock.calls[0][1];
// Verify the complete content equality
const expectedContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
### Log
${mockTimestamp} - Workitem has been implemented.
Some existing log content.
`;
expect(actualContentFromMock).toEqual(expectedContent);
});
it('should throw error if workitem file does not exist', async () => {
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const status: WorkitemImplementationStatus = 'create';
const filesWritten = ['file1.ts', 'file2.ts'];
const filesRemoved: string[] = [];
// Mock fs.existsSync to return false for workitem file
(fs.existsSync as jest.Mock).mockReturnValue(false);
await expect(projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved))
.rejects.toThrow('Workitem file not found: path/to/workitem.md');
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).not.toHaveBeenCalled();
expect(fs.writeFileSync).not.toHaveBeenCalled();
});
});
});

View File

@ -290,7 +290,7 @@ export class ProcessorService {
// Generate PR description using Gemini
const workItemsSummary = result.processedWorkitems
- .map(item => `${item.workitem.name}: ${item.status} (${item.filesWritten?.length ?? 0} written, ${item.filesRemoved?.length ?? 0} removed)`)
+ .map(item => `${item.workitem.name}: ${item.filesWritten?.length ?? 0} written, ${item.filesRemoved?.length ?? 0} removed`)
.reduce((acc, item) => `${acc}\n${item}`, '');
const description = await this.geminiService.generatePullRequestDescription(
workItemsSummary,
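As an illustration of the reshaped summary line (workitem names and counts below are hypothetical, not from the repository):

// Sketch: each summary line now carries only file counts, no status.
const summaryLine = (item: { workitem: { name: string }; filesWritten?: string[]; filesRemoved?: string[] }) =>
    `${item.workitem.name}: ${item.filesWritten?.length ?? 0} written, ${item.filesRemoved?.length ?? 0} removed`;
// e.g. "add-login-form: 3 written, 0 removed"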

View File

@ -5,6 +5,7 @@ import * as fs from 'fs';
import * as path from 'path';
import {Project, ProjectService as SharedProjectService} from 'shared-functions';
import {Workitem, WorkitemImplementationStatus} from '../types';
+ import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";
export class ProjectService {
private sharedProjectService: SharedProjectService;
@ -184,9 +185,7 @@ export class ProjectService {
*/
async updateWorkitemWithImplementationLog(
workitem: Workitem,
- status: WorkitemImplementationStatus,
- filesWritten: string[] = [],
- filesRemoved: string[] = [],
+ response: GeminiResponse
): Promise<Workitem> {
if (!fs.existsSync(workitem.path)) {
throw new Error(`Workitem file not found: ${workitem.path}`);
@ -198,32 +197,17 @@ export class ProjectService {
// Format the log message
const timestamp = new Date().toISOString();
- let logMessage = `${timestamp} - `;
- switch (status) {
- case 'create':
- logMessage += `Workitem has been implemented.\n`;
- break;
- case 'update':
- logMessage += `Workitem has been updated.\n`;
- break;
- case 'delete':
- logMessage += `Workitem has been deleted.\n`;
- break;
- }
- // Add the list of files
- if (filesWritten.length > 0) {
- for (const file of filesWritten) {
- logMessage += `- Created ${file}\n`;
- }
- }
- if (filesRemoved.length > 0) {
- for (const file of filesRemoved) {
- logMessage += `- Removed ${file}\n`;
- }
- }
+ let logMessage = `${timestamp} - Gemini updates`;
+ response.stepOutcomes.forEach(outcome => {
+ logMessage += `\n- ${outcome.decision}: ${outcome.reason}`;
+ })
+ response.fileDeleted.forEach(file => {
+ logMessage += `\n- Delete file ${file}`;
+ })
+ response.fileWritten.forEach(file => {
+ logMessage += `\n- Added file ${file}`;
+ })
// Add PR URL if available
if (workitem.pullRequestUrl) {
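Because this hunk is the hardest one to read in the flattened view, here is the new log construction reassembled as plain code; it only uses names that appear in the hunk above and the GeminiResponse shape introduced in shared-functions later in this commit:

// Sketch of the new right-hand side: one "Gemini updates" entry built from the GeminiResponse.
const timestamp = new Date().toISOString();
let logMessage = `${timestamp} - Gemini updates`;
response.stepOutcomes.forEach(outcome => {
    logMessage += `\n- ${outcome.decision}: ${outcome.reason}`;
});
response.fileDeleted.forEach(file => {
    logMessage += `\n- Delete file ${file}`;
});
response.fileWritten.forEach(file => {
    logMessage += `\n- Added file ${file}`;
});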

View File

@ -11,6 +11,7 @@ import {
Project,
RepositoryService as SharedRepositoryService,
} from 'shared-functions';
+ import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";
export class ProjectWorkitemsService {
private projectService: ProjectService;
@ -116,47 +117,14 @@ export class ProjectWorkitemsService {
relevantFiles
);
- const decision = result.decision?.decision ?? 'skip';
+ const hasChanges = result.fileWritten.length > 0 || result.fileDeleted.length > 0;
- // Check status consistency
- switch (decision) {
- case "skip":
- if (result.filesWritten.length > 0) {
- throw new Error(`Skip decision with files written: ${result.filesWritten.join(', ')}`);
- }
- if (result.filesDeleted.length > 0) {
- throw new Error(`Skip decision with files deleted: ${result.filesDeleted.join(', ')}`);
- }
- break;
- case "create":
- if (result.filesWritten.length === 0) {
- throw new Error(`Create decision with no files written`);
- }
- break;
- case "update":
- if (result.filesWritten.length === 0) {
- throw new Error(`Update decision with no files written`);
- }
- break;
- case "delete":
- if (result.filesDeleted.length === 0) {
- throw new Error(`Delete decision with no files deleted`);
- }
- break;
- }
// Update the workitem file with implementation log
- if (decision !== 'skip') {
+ if (hasChanges) {
try {
- // Determine the log status based on the operation status
- const logStatus = decision;
// Update the workitem file with implementation log
await this.projectService.updateWorkitemWithImplementationLog(
workitem,
- logStatus,
- result.filesWritten,
- result.filesDeleted
+ result
);
console.log(`ProjectWorkitemsService: Updated workitem file with implementation log for ${workitem.name}`);
@ -165,13 +133,12 @@ export class ProjectWorkitemsService {
}
}
- console.log(`ProjectWorkitemsService: Completed processing workitem: ${workitem.name} (Status: ${decision}, Files written: ${result.filesWritten.length})`);
+ console.log(`ProjectWorkitemsService: Completed processing workitem: ${workitem.name} (Files written: ${result.fileWritten.length})`);
return {
success: true,
- status: decision,
workitem,
- filesWritten: result.filesWritten,
- filesRemoved: result.filesDeleted,
+ filesWritten: result.fileWritten,
+ filesRemoved: result.fileDeleted,
};
} catch (error) {
console.error(`Error processing workitem ${workitem.name}:`, error);
@ -229,26 +196,17 @@ export class ProjectWorkitemsService {
workitemContent: string,
workitemName: string,
relevantFiles: Record<string, string> = {}
- ): Promise<{
- text: string;
- decision?: { decision: 'create' | 'update' | 'delete' | 'skip'; reason: string };
- filesWritten: string[];
- filesDeleted: string[];
- }> {
+ ): Promise<GeminiResponse> {
const currentDate = new Date().toISOString();
// If dry run is enabled, return a mock feature file
if (DRY_RUN_SKIP_GEMINI) {
console.log(`[DRY RUN] Skipping Gemini API call for generating feature file for ${workitemName}`);
- const mockText = `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)`;
return {
- text: mockText,
- decision: {
- decision: 'create',
- reason: 'This is a mock decision for dry run mode'
- },
- filesWritten: [],
- filesDeleted: []
+ fileWritten: [],
+ fileDeleted: [],
+ stepOutcomes: [],
+ modelResponses: []
};
}
@ -284,11 +242,6 @@ export class ProjectWorkitemsService {
projectRepoPath
);
- return {
- text: result.text,
- decision: result.decision,
- filesWritten: result.filesWritten,
- filesDeleted: result.filesDeleted
- };
+ return result;
}
}
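Putting the pieces of these hunks together, the new control flow of the workitem processing is roughly the following; it is a sketch, using only names visible above, and assumes result is the GeminiResponse returned by the Gemini call (whose method name is not shown in the hunk):

// Sketch: status/decision bookkeeping is gone; changes are detected from the GeminiResponse itself.
const hasChanges = result.fileWritten.length > 0 || result.fileDeleted.length > 0;
if (hasChanges) {
    // The whole GeminiResponse is handed to the log writer instead of (status, filesWritten, filesRemoved).
    await this.projectService.updateWorkitemWithImplementationLog(workitem, result);
}
return {
    success: true,
    workitem,
    filesWritten: result.fileWritten,
    filesRemoved: result.fileDeleted,
};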

View File

@ -31,7 +31,6 @@ export interface ProcessedWorkItem {
workitem: Workitem;
success: boolean;
error?: string;
- status?: 'create' | 'update' | 'delete' | 'skip';
filesWritten?: string[];
filesRemoved?: string[];
}
@ -65,9 +64,6 @@ export interface ProjectSummary {
success: boolean;
error?: string;
workitemsProcessed: number;
- workitemsSkipped: number;
- workitemsUpdated: number;
- workitemsCreated: number;
filesWritten: number;
pullRequestUrl?: string;
gitPatch?: string;
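Read together, the two hunks leave the result types looking roughly like this; only fields visible in the hunks are listed, and any identifying fields of ProjectSummary outside the shown context are omitted:

// Sketch of the slimmed-down shapes after removing the per-status fields.
export interface ProcessedWorkItem {
    workitem: Workitem;
    success: boolean;
    error?: string;
    filesWritten?: string[];
    filesRemoved?: string[];
}

export interface ProjectSummary {
    // ...identifying fields not shown in the hunk...
    success: boolean;
    error?: string;
    workitemsProcessed: number;
    filesWritten: number;
    pullRequestUrl?: string;
    gitPatch?: string;
}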

View File

@ -0,0 +1,358 @@
import * as fs from 'fs';
import * as path from 'path';
import { GeminiFileSystemService } from '../gemini-file-system-service';
// Mock fs and path modules
jest.mock('fs');
jest.mock('path');
describe('GeminiFileSystemService', () => {
let service: GeminiFileSystemService;
const mockProjectId = 'test-project-id';
beforeEach(() => {
service = new GeminiFileSystemService(mockProjectId);
// Reset all mocks
jest.resetAllMocks();
// Mock path.join to return predictable paths
(path.join as jest.Mock).mockImplementation((...args) => args.join('/'));
// Mock path.relative to return predictable relative paths
(path.relative as jest.Mock).mockImplementation((from, to) => {
return to.replace(`${from}/`, '');
});
});
describe('grepFiles', () => {
it('should throw an error if search string is not provided', () => {
expect(() => {
service.grepFiles('/root', '');
}).toThrow('Search string is required');
});
it('should search for a string in files', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/file2.ts': 'const z = 3;\nconst searchTerm = "not found";\nconst w = 4;',
'/root/subdir/file3.ts': 'const a = 5;\nconst searchTerm = "found";\nconst b = 6;',
};
// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.ts', isDirectory: () => false, isFile: () => true },
{ name: 'subdir', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/subdir') {
return [
{ name: 'file3.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});
// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});
const results = service.grepFiles('/root', 'found');
// The implementation matches substrings, so "not found" also matches
expect(results).toHaveLength(3);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
expect(results[1]).toEqual({
file: 'file2.ts',
line: 2,
content: 'const searchTerm = "not found";'
});
expect(results[2]).toEqual({
file: 'subdir/file3.ts',
line: 2,
content: 'const searchTerm = "found";'
});
});
it('should search for a string with wildcard', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/file2.ts': 'const z = 3;\nconst searchTerm = "not found";\nconst w = 4;',
'/root/file3.ts': 'const a = 5;\nconst searchPrefix = "prefound";\nconst b = 6;',
};
// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file3.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});
// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});
const results = service.grepFiles('/root', '*found*');
expect(results).toHaveLength(3);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
expect(results[1]).toEqual({
file: 'file2.ts',
line: 2,
content: 'const searchTerm = "not found";'
});
expect(results[2]).toEqual({
file: 'file3.ts',
line: 2,
content: 'const searchPrefix = "prefound";'
});
});
it('should filter files by pattern', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/file2.js': 'const z = 3;\nconst searchTerm = "found";\nconst w = 4;',
'/root/subdir/file3.ts': 'const a = 5;\nconst searchTerm = "found";\nconst b = 6;',
};
// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.js', isDirectory: () => false, isFile: () => true },
{ name: 'subdir', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/subdir') {
return [
{ name: 'file3.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});
// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});
// Mock matchesPattern to use the actual implementation
jest.spyOn(service as any, 'matchesPattern').mockImplementation((...args: unknown[]) => {
// Simple implementation for testing
const filename = args[0] as string;
const pattern = args[1] as string;
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
const regex = new RegExp(`^${regexPattern}$`);
return regex.test(filename);
});
const results = service.grepFiles('/root', 'found', '*.ts');
expect(results).toHaveLength(2);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
expect(results[1]).toEqual({
file: 'subdir/file3.ts',
line: 2,
content: 'const searchTerm = "found";'
});
});
it('should skip node_modules and .git directories', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/file1.ts': 'const x = 1;\nconst searchTerm = "found";\nconst y = 2;',
'/root/node_modules/file2.ts': 'const z = 3;\nconst searchTerm = "found";\nconst w = 4;',
'/root/.git/file3.ts': 'const a = 5;\nconst searchTerm = "found";\nconst b = 6;',
};
// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'node_modules', isDirectory: () => true, isFile: () => false },
{ name: '.git', isDirectory: () => true, isFile: () => false },
];
}
return [];
});
// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});
const results = service.grepFiles('/root', 'found');
expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 2,
content: 'const searchTerm = "found";'
});
});
it('should handle file read errors gracefully', () => {
// Mock directory structure
(fs.readdirSync as jest.Mock).mockImplementation((dirPath, options) => {
if (dirPath === '/root') {
return [
{ name: 'file1.ts', isDirectory: () => false, isFile: () => true },
{ name: 'file2.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});
// Mock fs.readFileSync to throw an error for one file
(fs.readFileSync as jest.Mock).mockImplementation((filePath, encoding) => {
if (filePath === '/root/file1.ts') {
return 'const searchTerm = "found";';
} else if (filePath === '/root/file2.ts') {
throw new Error('File read error');
}
return '';
});
const results = service.grepFiles('/root', 'found');
// Should still return results from the file that could be read
expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'file1.ts',
line: 1,
content: 'const searchTerm = "found";'
});
});
it('should match "Ws*Document*Controller" with "WsCustomerDocumentController"', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/controller.ts': 'import { WsCustomerDocumentController } from "./controllers";',
};
// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'controller.ts', isDirectory: () => false, isFile: () => true },
];
}
return [];
});
// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});
const results = service.grepFiles('/root', 'Ws*Document*Controller');
expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'controller.ts',
line: 1,
content: 'import { WsCustomerDocumentController } from "./controllers";'
});
});
it('should match "class Ws*Document*Controller" with filePattern "nitro-domain-api/src/main/java/**"', () => {
// Mock directory structure
const mockFiles: Record<string, string> = {
'/root/nitro-domain-api/src/main/java/be/test/WsCustomerDocumentController.java': 'package be.test;\n\npublic class WsCustomerDocumentController {\n // Class implementation\n}',
'/root/some-other-path/SomeOtherFile.java': 'package some.other.path;\n\npublic class WsCustomerDocumentController {\n // Should not match due to file pattern\n}',
};
// Mock fs.readdirSync to return directory entries
(fs.readdirSync as jest.Mock).mockImplementation((dirPath: string, options: any) => {
if (dirPath === '/root') {
return [
{ name: 'nitro-domain-api', isDirectory: () => true, isFile: () => false },
{ name: 'some-other-path', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api') {
return [
{ name: 'src', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src') {
return [
{ name: 'main', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main') {
return [
{ name: 'java', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main/java') {
return [
{ name: 'be', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main/java/be') {
return [
{ name: 'test', isDirectory: () => true, isFile: () => false },
];
} else if (dirPath === '/root/nitro-domain-api/src/main/java/be/test') {
return [
{ name: 'WsCustomerDocumentController.java', isDirectory: () => false, isFile: () => true },
];
} else if (dirPath === '/root/some-other-path') {
return [
{ name: 'SomeOtherFile.java', isDirectory: () => false, isFile: () => true },
];
}
return [];
});
// Mock fs.readFileSync to return file content
(fs.readFileSync as jest.Mock).mockImplementation((filePath: string, encoding: string) => {
return mockFiles[filePath] || '';
});
// Mock matchesPattern to use the actual implementation
jest.spyOn(service as any, 'matchesPattern').mockImplementation((...args: unknown[]) => {
// Simple implementation for testing
const filename = args[0] as string;
const pattern = args[1] as string;
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*');
const regex = new RegExp(`^${regexPattern}$`);
return regex.test(filename);
});
const results = service.grepFiles('/root', 'class Ws*Document*Controller', 'nitro-domain-api/src/main/java/**');
expect(results).toHaveLength(1);
expect(results[0]).toEqual({
file: 'nitro-domain-api/src/main/java/be/test/WsCustomerDocumentController.java',
line: 3,
content: 'public class WsCustomerDocumentController {'
});
});
});
});
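These tests pin down the substring-plus-wildcard semantics of grepFiles. As a standalone sketch of the matching rule they assume (mirroring the regex conversion added later in this commit; the function name here is illustrative):

// Sketch: '*' becomes '.*', and the resulting pattern may match anywhere in a line.
function lineMatches(line: string, searchString: string): boolean {
    const pattern = searchString.replace(/\*/g, '.*');
    return new RegExp(`.*${pattern}.*`).test(line);
}
// lineMatches('const searchTerm = "not found";', 'found') === true   (plain substring match)
// lineMatches('public class WsCustomerDocumentController {', 'Ws*Document*Controller') === true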

View File

@ -4,9 +4,12 @@
import * as fs from 'fs';
import * as path from 'path';
import {
+ Content,
+ FunctionCall,
FunctionDeclarationSchemaType,
- GenerateContentCandidate,
+ FunctionResponse,
GenerateContentRequest,
+ GenerativeModel,
Tool,
VertexAI
} from '@google-cloud/vertexai';
@ -24,23 +27,14 @@ export interface FunctionArgs {
reason?: string;
}
- /**
- * Interface for the model response format
- */
- export interface ModelResponse {
+ export interface GeminiResponse {
+ fileWritten: string[];
+ fileDeleted: string[];
+ stepOutcomes: {
decision: 'create' | 'update' | 'delete' | 'skip';
reason: string;
- }
+ }[];
- /**
- * Interface for the result of processing a model stream
- */
- export interface ModelStreamResult {
- text: string;
- decision?: ModelResponse;
modelResponses: string[];
- filesWritten: string[];
- filesDeleted: string[];
}
/**
@ -148,7 +142,7 @@ export class GeminiFileSystemService {
},
filePattern: {
type: FunctionDeclarationSchemaType.STRING,
- description: "Optional file pattern to limit the search (e.g., '*.ts', 'src/*.java')"
+ description: "Optional glob file pattern to limit the search (e.g., '*.ts', 'src/*.java')"
}
},
required: ["searchString"]
@ -169,14 +163,14 @@ export class GeminiFileSystemService {
}
},
{
- name: "reportFinalOutcome",
- description: "Submit the final outcome for compliance with guidelines. Can only be called once.",
+ name: "reportStepOutcome",
+ description: "Submit the outcome for a step in compliance with guidelines. Can be called multiple times.",
parameters: {
type: FunctionDeclarationSchemaType.OBJECT,
properties: {
outcome: {
type: FunctionDeclarationSchemaType.STRING,
- description: "The final outcome: 'create', 'update', 'delete', or 'skip'",
+ description: "The step outcome: 'create', 'update', 'delete', or 'skip'",
enum: ["create", "update", "delete", "skip"]
},
reason: {
@ -253,24 +247,58 @@ export class GeminiFileSystemService {
}
/**
- * List files in a directory
+ * List files in a directory, optionally with a glob pattern and recursion
- * @param rootPath Root path of the filesystem
* @param dirPath Path to the directory relative to the root path
- * @returns Array of file names
+ * @param pattern Optional glob pattern to filter files
+ * @returns Array of file paths relative to dirPath
*/
- listFiles(rootPath: string, dirPath: string): string[] {
- console.debug(" - listFiles called with dirPath: " + dirPath);
+ listFiles(rootPath: string, dirPath: string, pattern?: string): string[] {
+ console.debug(" - listFiles called with dirPath: " + dirPath + (pattern ? ", pattern: " + pattern : ""));
const fullPath = path.join(rootPath, dirPath);
if (!fs.existsSync(fullPath)) {
throw new Error(`Directory not found: ${dirPath}`);
}
- return fs.readdirSync(fullPath);
+ const results: string[] = [];
+ // Helper function to recursively list files in a directory
+ const listFilesInDirectory = (currentPath: string, basePath: string) => {
+ try {
+ const entries = fs.readdirSync(currentPath, {withFileTypes: true});
+ for (const entry of entries) {
+ const entryPath = path.join(currentPath, entry.name);
+ const relativePath = path.relative(basePath, entryPath);
+ if (entry.isDirectory()) {
+ // If pattern includes ** (recursive glob), recurse into subdirectories
+ if (pattern && pattern.includes('**')) {
+ listFilesInDirectory(entryPath, basePath);
+ }
+ } else if (entry.isFile()) {
+ // Check if the file matches the pattern
+ if (!pattern || this.matchesPattern(relativePath, pattern)) {
+ results.push(relativePath);
+ }
+ }
+ }
+ } catch (error) {
+ // Silently ignore directory read errors
+ }
+ };
+ // Start listing from the specified directory
+ listFilesInDirectory(fullPath, fullPath);
+ return results;
}
/**
* Search for a string in files
* @param rootPath Root path to search in
- * @param searchString String to search for
- * @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java")
+ * @param searchString String to search for. * can be used for wildcards
+ * @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java", "src/**")
* @returns Array of matches with file paths and line numbers
* @throws Error if search string is not provided
*/
@ -292,8 +320,11 @@ export class GeminiFileSystemService {
const content = fs.readFileSync(filePath, 'utf-8');
const lines = content.split('\n');
+ const pattern = searchString.replace(/\*/g, '.*'); // Convert * to .*
+ const regex = new RegExp(`.*${pattern}.*`);
for (let i = 0; i < lines.length; i++) {
- if (lines[i].includes(searchString)) {
+ if (regex.test(lines[i])) {
results.push({
file: relativePath,
line: i + 1, // 1-based line numbers
@ -367,22 +398,15 @@ export class GeminiFileSystemService {
guidelines: string,
additionalContent: string,
rootPath: string
- ): Promise<ModelStreamResult> {
+ ): Promise<GeminiResponse> {
- const currentDate = new Date().toISOString();
// If dry run is enabled, return a mock result
if (this.dryRun) {
console.log(`[DRY RUN] Skipping Gemini API call for processing`);
- const mockText = `# Generated on ${currentDate} (DRY RUN)`;
return {
- text: mockText,
- decision: {
- decision: 'create',
- reason: 'This is a mock decision for dry run mode'
- },
+ stepOutcomes: [],
+ fileDeleted: [],
modelResponses: [],
- filesWritten: [],
- filesDeleted: []
+ fileWritten: []
};
}
@ -398,18 +422,18 @@ ${additionalContent}
You have access to the following function calls to help you understand the project structure and create implementations:
- getFileContent(filePath): Get the content of a file in the project repository
- - writeFileContent(filePath, content): Write content to a file in the project repository
+ - writeFileContent(filePath, content): Write content to a file in the project repository (create or update)
- fileExists(filePath): Check if a file exists in the project repository
- listFiles(dirPath): List files in a directory in the project repository
- - grepFiles(searchString, filePattern): Search for a string in project files, optionally filtered by a file pattern
+ - grepFiles(searchString, filePattern): Search for a string in project files, optionally filtered by a file pattern (glob)
+ use filePattern='path/**' to search recursively in all files under path.
- deleteFile(filePath): Delete a file from the project repository
IMPORTANT: First use the function calls above to comply with the guidelines. Create, update, or delete all required files.
- Then, once finished with all the guidelines above, use this function once to report the overall outcome:
- - reportFinalOutcome(outcome, reason): Outcome must be one of: 'create', 'update', 'delete', 'skip'
- You won't be able to update other files once you've made a decision.
+ You can use this function to report the outcome of each step as you work through the guidelines:
+ - reportStepOutcome(outcome, reason): Outcome must be one of: 'create', 'update', 'delete', 'skip'
`;
// Instantiate the model with our file operation tools
@ -417,9 +441,7 @@ You won't be able to update other files once you've made a decision.
model: this.model,
tools: this.fileOperationTools,
generation_config: {
- temperature: 0.3, // Very low temperature for more deterministic responses
+ temperature: 0.2, // Very low temperature for more deterministic responses
- top_p: 0.8, // Higher top_p to allow more diverse completions when needed
- top_k: 60, // Consider only the top 40 tokens
},
});
@ -430,199 +452,29 @@ You won't be able to update other files once you've made a decision.
],
tools: this.fileOperationTools,
};
const geminiResponse = await this.handleGeminiStream(generativeModel, request, rootPath);
// Generate content in a streaming fashion
const streamingResp = await generativeModel.generateContentStream(request);
console.debug("--- Gemini response:");
geminiResponse.modelResponses.forEach(r => {
console.debug(r);
})
console.debug("---");
return geminiResponse;
// Track state within the method scope
const filesWritten: string[] = [];
const filesDeleted: string[] = [];
const modelResponses: string[] = [];
let decision: ModelResponse | undefined;
let finalResponse = '';
let pendingFunctionCalls = [];
// Process the streaming response
for await (const item of streamingResp.stream) {
// Check if there's a function call in any part of the response
let functionCall = null;
let textContent = '';
// Iterate over every part in the response
for (const part of item.candidates?.[0]?.content?.parts || []) {
if (part.functionCall) {
functionCall = part.functionCall;
break;
} else if (part.text) {
textContent += part.text;
}
}
if (functionCall) {
pendingFunctionCalls.push(functionCall);
} else if (textContent) {
// If there's text, append it to the final response
finalResponse += textContent;
modelResponses.push(textContent);
console.debug("- received text: " + textContent);
}
}
// Process any function calls that were detected
if (pendingFunctionCalls.length > 0) {
let currentRequest: GenerateContentRequest = request;
// Process each function call
for (const functionCall of pendingFunctionCalls) {
const functionName = functionCall.name;
const functionArgs = (typeof functionCall.args === 'string' ?
JSON.parse(functionCall.args) : functionCall.args) as FunctionArgs;
let functionResponse;
try {
// Execute the function
switch (functionName) {
case 'getFileContent':
functionResponse = this.getFileContent(rootPath, functionArgs.filePath!);
break;
case 'writeFileContent':
this.writeFileContent(rootPath, functionArgs.filePath!, functionArgs.content!);
functionResponse = `File ${functionArgs.filePath} written successfully`;
// Track the file written
filesWritten.push(functionArgs.filePath!);
break;
case 'fileExists':
functionResponse = this.fileExists(rootPath, functionArgs.filePath!);
break;
case 'listFiles':
functionResponse = this.listFiles(rootPath, functionArgs.dirPath!);
break;
case 'grepFiles':
functionResponse = this.grepFiles(rootPath, functionArgs.searchString!, functionArgs.filePattern);
break;
case 'deleteFile':
functionResponse = this.deleteFile(rootPath, functionArgs.filePath!);
// Track the file deleted
filesDeleted.push(functionArgs.filePath!);
break;
case 'reportFinalOutcome':
console.debug(`- received reportFinalOutcome function call: ${functionArgs.outcome} - ${functionArgs.reason}`);
// Store the decision
decision = {
decision: functionArgs.outcome!,
reason: functionArgs.reason!
};
functionResponse = `Outcome recorded: ${functionArgs.outcome} - ${functionArgs.reason}`;
break;
default:
throw new Error(`Unknown function: ${functionName}`);
}
private createFunctionExchangeContents(
functionCall: FunctionCall,
responseData: any,
): Content[] {
// Create a function response object
const functionResponseObj = {
name: functionName,
response: {result: JSON.stringify(functionResponse)}
const functionResponseObj: FunctionResponse = {
name: functionCall.name,
response: {
data: JSON.stringify(responseData),
},
};
return [
// Update the request with the function call and response
currentRequest = this.createNextRequest(currentRequest, functionCall, functionResponseObj);
// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);
// Process the next streaming response
const nextResult = await this.processNextStreamingResponse(nextStreamingResp);
// Update state
finalResponse += nextResult.textContent;
if (nextResult.textContent) {
modelResponses.push(nextResult.textContent);
}
if (nextResult.functionCall) {
if (decision != null) {
console.warn(`Received another function call for ${nextResult.functionCall.name}, but a decision hsa been recorded. Ignoring stream`);
break;
}
pendingFunctionCalls.push(nextResult.functionCall);
}
} catch (error) {
let errorMessage = error instanceof Error ? error.message : String(error);
console.error(`Error executing function ${functionName}: ${errorMessage}`);
// Create an error response object
const errorResponseObj = {
name: functionName,
response: {error: errorMessage}
};
// Update the request with the function call and error response
currentRequest = this.createNextRequest(currentRequest, functionCall, errorResponseObj, true);
// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);
// Process the next streaming response
const nextResult = await this.processNextStreamingResponse(nextStreamingResp, true);
// Update state
finalResponse += nextResult.textContent;
if (nextResult.textContent) {
modelResponses.push(nextResult.textContent);
}
if (nextResult.functionCall) {
if (decision != null) {
console.warn(`Received another function call for ${nextResult.functionCall.name}, but a decision hsa been recorded. Ignoring stream`);
break;
}
pendingFunctionCalls.push(nextResult.functionCall);
}
}
}
}
// If no explicit decision was made using the reportFinalOutcome function, try to parse it from the text
if (!decision) {
console.warn(`No decision function call made during the stream session`);
try {
// Try to parse a JSON decision from the text
const jsonMatch = finalResponse.match(/\{[\s\S]*"decision"[\s\S]*\}/);
if (jsonMatch) {
decision = JSON.parse(jsonMatch[0]) as ModelResponse;
}
} catch (error) {
console.error(`Error parsing JSON decision:`, error);
}
}
console.debug(`- Completed gemini stream processing. Final response: ${decision?.decision} - ${decision?.reason}`);
return {
text: finalResponse,
decision: decision ?? {decision: "skip", reason: "No decision received/parsed"},
modelResponses: modelResponses,
filesWritten: filesWritten,
filesDeleted: filesDeleted
};
}
/**
* Create the next request with function call and response
* @param currentRequest Current request
* @param functionCall Function call object
* @param functionResponseObj Function response object
* @param isError Whether the response is an error
* @returns Next request
*/
private createNextRequest(
currentRequest: GenerateContentRequest,
functionCall: any,
functionResponseObj: any,
isError: boolean = false
): GenerateContentRequest {
return {
contents: [
...currentRequest.contents,
{
role: 'ASSISTANT',
parts: [
@ -639,40 +491,133 @@ You won't be able to update other files once you've made a decision.
}
]
}
],
];
}
private processFunctionCall(functionCall: FunctionCall, rootPath: string, callbacks: {
onFileWritten: (file: string) => any;
onFileDelete: (file: string) => any;
onStepOutcome: (outcome: 'create' | 'update' | 'delete' | 'skip', reason: string) => any
}): string | string[] | boolean | any {
const functionName = functionCall.name;
try {
const functionArgs = (typeof functionCall.args === 'string' ?
JSON.parse(functionCall.args) : functionCall.args) as FunctionArgs;
let functionResponse: string | string[] | boolean | any;
// Execute the function
switch (functionName) {
case 'getFileContent':
functionResponse = this.getFileContent(rootPath, functionArgs.filePath!);
break;
case 'writeFileContent':
this.writeFileContent(rootPath, functionArgs.filePath!, functionArgs.content!);
functionResponse = `File ${functionArgs.filePath} written successfully`;
// Track the file written
callbacks.onFileWritten(functionArgs.filePath!);
break;
case 'fileExists':
functionResponse = this.fileExists(rootPath, functionArgs.filePath!);
break;
case 'listFiles':
functionResponse = this.listFiles(rootPath, functionArgs.dirPath!);
break;
case 'grepFiles':
functionResponse = this.grepFiles(rootPath, functionArgs.searchString!, functionArgs.filePattern);
break;
case 'deleteFile':
functionResponse = this.deleteFile(rootPath, functionArgs.filePath!);
// Track the file deleted
callbacks.onFileDelete(functionArgs.filePath!);
break;
case 'reportStepOutcome':
console.debug(` - received reportStepOutcome function call: ${functionArgs.outcome} - ${functionArgs.reason}`);
callbacks.onStepOutcome(functionArgs.outcome!, functionArgs.reason!);
functionResponse = `Step outcome recorded: ${functionArgs.outcome} - ${functionArgs.reason}`;
break;
default:
throw new Error(`Unknown function: ${functionName}`);
}
return functionResponse;
} catch (error) {
let errorMessage = error instanceof Error ? error.message : String(error);
console.error(`Error executing function ${functionName}: ${errorMessage}`);
return {error: errorMessage};
}
}
private async handleGeminiStream(generativeModel: GenerativeModel, request: GenerateContentRequest,
rootPath: string,
geminiResponse: GeminiResponse = {
stepOutcomes: [],
fileDeleted: [],
fileWritten: [],
modelResponses: []
}): Promise<GeminiResponse> {
// Generate content in a streaming fashion
const streamGenerateContentResult = await generativeModel.generateContentStream(request);
const pendingFunctionCalls = [];
// Process the streaming response
for await (const item of streamGenerateContentResult.stream) {
// Iterate over every part in the response
let generateContentCandidates = item.candidates ?? [];
if (generateContentCandidates.length === 0) {
throw new Error(`No candidates found in streaming response`);
}
if (generateContentCandidates.length > 1) {
console.warn(`Multiple (${generateContentCandidates.length}) candidates found in streaming response. Using the first one`);
}
const responseCandidate = generateContentCandidates[0];
const responseParts = responseCandidate.content?.parts || [];
if (responseParts.length === 0) {
console.warn(`No parts found in streaming response`);
return geminiResponse;
}
for (const part of responseParts) {
if (part.functionCall) {
const functionCall = part.functionCall;
pendingFunctionCalls.push(functionCall);
} else if (part.text) {
const textContent = part.text;
geminiResponse.modelResponses.push(textContent);
} else {
console.warn(`Unhandled response part: ${JSON.stringify(part)}`);
}
}
}
// Process any function calls that were detected
if (pendingFunctionCalls.length > 0) {
// TODO: drop old content above 1M tokens
const updatedRequestContents = [
...request.contents,
];
for (const functionCall of pendingFunctionCalls) {
const responseData = this.processFunctionCall(functionCall, rootPath, {
onFileWritten: (f) => geminiResponse.fileWritten.push(f),
onFileDelete: (f) => geminiResponse.fileDeleted.push(f),
onStepOutcome: (outcome, reason) => geminiResponse.stepOutcomes.push({
decision: outcome,
reason: reason
})
});
const contents = this.createFunctionExchangeContents(functionCall, responseData);
updatedRequestContents.push(...contents);
}
// Submit a new request
const updatedRequest: GenerateContentRequest = {
contents: updatedRequestContents,
tools: this.fileOperationTools,
};
}
return this.handleGeminiStream(generativeModel, updatedRequest, rootPath, geminiResponse);
} else {
return geminiResponse;
/**
* Process the next streaming response
* @param nextStreamingResp Next streaming response
* @param isAfterError Whether this is after an error
* @returns Object containing text content and function call
*/
private async processNextStreamingResponse(
nextStreamingResp: any,
isAfterError: boolean = false
): Promise<{
textContent: string,
functionCall: any
}> {
let textContent = '';
let functionCall = null;
for await (const nextItem of nextStreamingResp.stream) {
// Iterate over every part in the response
for (const part of nextItem.candidates?.[0]?.content?.parts || []) {
if (part.functionCall) {
functionCall = part.functionCall;
break;
} else if (part.text) {
textContent += part.text;
}
}
}
return {textContent, functionCall};
}
}
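To summarize the reshaped shared service, a hedged sketch of how a caller now consumes processModelStream. Only the GeminiResponse fields and the three parameters visible in the hunks are used; the project id and the guidelines/additionalContent/rootPath values are placeholders, and the call is assumed to run inside an async function:

// Sketch: callers no longer receive a single final decision; they inspect per-step outcomes and file lists.
const service = new GeminiFileSystemService('my-gcp-project'); // hypothetical project id
const response: GeminiResponse = await service.processModelStream(guidelines, additionalContent, rootPath);
const hasChanges = response.fileWritten.length > 0 || response.fileDeleted.length > 0;
for (const step of response.stepOutcomes) {
    console.log(`${step.decision}: ${step.reason}`);
}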

View File

@ -202,6 +202,10 @@ export class ProcessorService {
console.error(`Failure for project ${project.name}: ${result.error}`);
return result;
}
+ if (result.gitPatch == null) {
+ console.warn(`No changes to commit for project ${project.name}`);
+ return result;
+ }
// Skip creating commits/PRs if dry run is enabled
if (DRY_RUN_SKIP_COMMITS) {

View File

@ -3,10 +3,12 @@
*/
import * as fs from 'fs';
import * as path from 'path';
- import {ProcessResult, TestSpecImplementationStatus} from '../types';
+ import {ProcessResult} from '../types';
import {ProjectService} from './project-service';
import {DRY_RUN_SKIP_GEMINI} from '../config';
import {GeminiFileSystemService, Project, RepositoryService as SharedRepositoryService,} from 'shared-functions';
+ import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";
+ import {success} from "concurrently/dist/src/defaults";
export class ProjectTestSpecsService {
private projectService: ProjectService;
@ -37,10 +39,11 @@ export class ProjectTestSpecsService {
// Generate git patch if any files were written
let gitPatch: string | undefined = undefined;
- if ((result.filesWritten?.length ?? 0) > 0) {
+ if ((result.filesWritten?.length ?? 0) > 0 || (result.filesRemoved?.length ?? 0) > 0) {
try {
console.log(`Generating git patch for project ${project.name} with ${result.filesWritten} files written`);
gitPatch = await this.sharedRepositoryService.generateGitPatch(projectRepoPath);
} catch (error) {
console.error(`Error generating git patch for project ${project.name}:`, error);
}
@ -50,6 +53,7 @@ export class ProjectTestSpecsService {
...result,
gitPatch
};
} catch (error) {
console.error(`Error processing project ${project.name}:`, error);
return {
@ -83,26 +87,12 @@ export class ProjectTestSpecsService {
relevantFiles
);
- // Check status consistency
- if (result.decision?.decision === 'skip') {
- if (result.filesWritten.length > 0) {
- throw new Error(`Skip decision with files written: ${result.filesWritten.join(', ')}`);
- }
- if (result.filesDeleted.length > 0) {
- throw new Error(`Skip decision with files deleted: ${result.filesDeleted.join(', ')}`);
- }
- } else if (result.decision?.decision === 'create' || result.decision?.decision === 'update') {
- if (result.filesWritten.length === 0) {
- throw new Error(`${result.decision.decision} decision with no files written`);
- }
- }
- console.log(`ProjectTestSpecsService: Completed processing project (Status: ${result.decision?.decision}, Files written: ${result.filesWritten.length})`);
+ console.log(`ProjectTestSpecsService: Completed processing project (Files written: ${result.fileWritten.length})`);
return {
project: project,
success: true,
- filesWritten: result.filesWritten,
- filesRemoved: result.filesDeleted,
+ filesWritten: result.fileWritten,
+ filesRemoved: result.fileDeleted,
};
} catch (error) {
console.error(`Error processing project ${project.name}:`, error);
@ -158,25 +148,15 @@ export class ProjectTestSpecsService {
projectRepoPath: string,
guidelines: string,
relevantFiles: Record<string, string> = {}
- ): Promise<{
- text: string;
- decision?: { decision: TestSpecImplementationStatus; reason: string };
- filesWritten: string[];
- filesDeleted: string[];
- }> {
- const currentDate = new Date().toISOString();
+ ): Promise<GeminiResponse> {
// If dry run is enabled, return a mock implementation
if (DRY_RUN_SKIP_GEMINI) {
- const mockText = `# Generated by test-spec-to-test-implementation on ${currentDate} (DRY RUN)`;
+ console.warn(`[DRY RUN] Skipping Gemini API call for processing`);
return {
- text: mockText,
- decision: {
- decision: 'create',
- reason: 'This is a mock decision for dry run mode'
- },
- filesWritten: [],
- filesDeleted: []
+ modelResponses: [],
+ stepOutcomes: [],
+ fileDeleted: [],
+ fileWritten: []
};
}
@ -204,11 +184,6 @@ export class ProjectTestSpecsService {
projectRepoPath
);
- return {
- text: result.text,
- decision: result.decision as { decision: TestSpecImplementationStatus; reason: string },
- filesWritten: result.filesWritten,
- filesDeleted: result.filesDeleted
+ return result;
};
} }
} }