cghislai 2025-06-08 07:41:29 +02:00
parent 9e4b0c2abb
commit 0aed81875f
8 changed files with 674 additions and 1311 deletions

View File

@ -1,517 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
import { ModelStreamService } from '../model-stream-service';
import { GeminiProjectProcessor } from '../gemini-project-processor';
import { Workitem } from '../../types';
// Mock dependencies
jest.mock('fs');
jest.mock('path');
jest.mock('../../config', () => ({
get GOOGLE_CLOUD_PROJECT_ID() { return process.env.GOOGLE_CLOUD_PROJECT_ID || 'mock-project-id'; },
get GOOGLE_CLOUD_LOCATION() { return process.env.GOOGLE_CLOUD_LOCATION || 'mock-location'; },
get GEMINI_MODEL() { return process.env.GEMINI_MODEL || 'mock-model'; },
DRY_RUN_SKIP_GEMINI: false
}));
// Mock VertexAI
const mockGenerateContentStream = jest.fn();
const mockGenerateContent = jest.fn();
const mockGetGenerativeModel = jest.fn().mockReturnValue({
generateContentStream: mockGenerateContentStream,
generateContent: mockGenerateContent,
startChat: jest.fn()
});
jest.mock('@google-cloud/vertexai', () => {
return {
VertexAI: jest.fn().mockImplementation(() => {
return {
getGenerativeModel: mockGetGenerativeModel
};
}),
FunctionDeclarationSchemaType: {
OBJECT: 'OBJECT',
STRING: 'STRING'
}
};
});
describe('ModelStreamService', () => {
let modelStreamService: ModelStreamService;
let mockGeminiProjectProcessor: jest.Mocked<GeminiProjectProcessor>;
let mockWorkitem: Workitem;
beforeEach(() => {
jest.clearAllMocks();
// Reset all mocks
mockGenerateContentStream.mockReset();
mockGenerateContent.mockReset();
mockGetGenerativeModel.mockReset();
// Mock config values
process.env.GOOGLE_CLOUD_PROJECT_ID = 'mock-project-id';
process.env.GOOGLE_CLOUD_LOCATION = 'mock-location';
process.env.GEMINI_MODEL = 'mock-model';
// Mock workitem
mockWorkitem = {
name: 'test-workitem',
path: '/mock/path/to/workitem.md',
title: 'Test Workitem',
description: 'This is a test workitem',
isActive: true
};
// Mock GeminiProjectProcessor
mockGeminiProjectProcessor = {
getFileContent: jest.fn(),
writeFileContent: jest.fn(),
fileExists: jest.fn(),
listFiles: jest.fn(),
grepFiles: jest.fn(),
deleteFile: jest.fn(),
getCurrentWorkitem: jest.fn().mockReturnValue(mockWorkitem),
processProject: jest.fn(),
processWorkitem: jest.fn(),
generateFeatureFile: jest.fn(),
collectRelevantFiles: jest.fn(),
matchesPattern: jest.fn()
} as unknown as jest.Mocked<GeminiProjectProcessor>;
// Set up default mock behavior for generateContentStream
mockGetGenerativeModel.mockReturnValue({
generateContentStream: mockGenerateContentStream,
generateContent: mockGenerateContent
});
// Create a new instance of ModelStreamService
modelStreamService = new ModelStreamService(
mockGeminiProjectProcessor,
mockWorkitem
);
});
describe('processModelStream', () => {
it('should process model stream and handle function calls', async () => {
// Set up the mock response for the initial stream
const initialStreamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'getFileContent',
args: JSON.stringify({ filePath: 'test/file.txt' })
}
}
]
}
}
]
}
]
};
// Set up the mock response for the next stream after function call
const nextStreamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'writeFileContent',
args: JSON.stringify({
filePath: 'test/output.txt',
content: 'Test content'
})
}
}
]
}
}
]
}
]
};
// Set up the mock response for the final stream with decision
const finalStreamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'makeDecision',
args: JSON.stringify({
decision: 'create',
reason: 'Created a new file'
})
}
}
]
}
}
]
},
{
candidates: [
{
content: {
parts: [
{ text: 'Processing complete' }
]
}
}
]
}
]
};
// Set up the mock implementations
mockGenerateContentStream
.mockResolvedValueOnce(initialStreamResponse)
.mockResolvedValueOnce(nextStreamResponse)
.mockResolvedValueOnce(finalStreamResponse)
.mockResolvedValue({ stream: [] }); // Add a default empty stream for any additional calls
mockGeminiProjectProcessor.getFileContent.mockReturnValue('Mock file content');
// Call the method
const result = await modelStreamService.processModelStream(
'Test guidelines',
'Test workitem content'
);
// Verify the result
expect(result.text).toContain('Processing complete');
expect(result.decision).toBeDefined();
expect(result.decision?.decision).toBe('create');
expect(result.decision?.reason).toBe('Created a new file');
expect(result.filesWritten).toContain('test/output.txt');
expect(result.filesDeleted).toHaveLength(0);
// Verify the function calls
expect(mockGeminiProjectProcessor.getFileContent).toHaveBeenCalledWith('test/file.txt');
expect(mockGeminiProjectProcessor.writeFileContent).toHaveBeenCalledWith(
'test/output.txt',
'Test content',
'test-workitem'
);
// Verify the generateContentStream was called
expect(mockGenerateContentStream).toHaveBeenCalledTimes(4);
});
it('should handle file deletion', async () => {
// Set up the mock response with a deleteFile function call
const streamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'deleteFile',
args: JSON.stringify({ filePath: 'test/file-to-delete.txt' })
}
}
]
}
}
]
}
]
};
// Set up the mock response for the next stream after function call
const nextStreamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'makeDecision',
args: JSON.stringify({
decision: 'delete',
reason: 'Deleted a file'
})
}
}
]
}
}
]
},
{
candidates: [
{
content: {
parts: [
{ text: 'Deletion complete' }
]
}
}
]
}
]
};
// Set up the mock implementations
mockGenerateContentStream
.mockResolvedValueOnce(streamResponse)
.mockResolvedValueOnce(nextStreamResponse)
.mockResolvedValue({ stream: [] }); // Add a default empty stream for any additional calls
mockGeminiProjectProcessor.deleteFile.mockReturnValue('File test/file-to-delete.txt deleted successfully');
// Call the method
const result = await modelStreamService.processModelStream(
'Test guidelines',
'Test workitem content'
);
// Verify the result
expect(result.text).toContain('Deletion complete');
expect(result.decision).toBeDefined();
expect(result.decision?.decision).toBe('delete');
expect(result.decision?.reason).toBe('Deleted a file');
expect(result.filesWritten).toHaveLength(0);
expect(result.filesDeleted).toContain('test/file-to-delete.txt');
// Verify the function calls
expect(mockGeminiProjectProcessor.deleteFile).toHaveBeenCalledWith('test/file-to-delete.txt');
// Verify the generateContentStream was called
expect(mockGenerateContentStream).toHaveBeenCalledTimes(3);
});
it('should handle errors in function calls', async () => {
// Set up the mock response with a function call that will fail
const streamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'getFileContent',
args: JSON.stringify({ filePath: 'test/non-existent-file.txt' })
}
}
]
}
}
]
}
]
};
// Set up the mock response for the next stream after error
const nextStreamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
functionCall: {
name: 'makeDecision',
args: JSON.stringify({
decision: 'skip',
reason: 'File not found'
})
}
}
]
}
}
]
},
{
candidates: [
{
content: {
parts: [
{ text: 'Error handled' }
]
}
}
]
}
]
};
// Set up the mock implementations
mockGenerateContentStream
.mockResolvedValueOnce(streamResponse)
.mockResolvedValueOnce(nextStreamResponse)
.mockResolvedValue({ stream: [] }); // Add a default empty stream for any additional calls
mockGeminiProjectProcessor.getFileContent.mockImplementation(() => {
throw new Error('File not found');
});
// Call the method
const result = await modelStreamService.processModelStream(
'Test guidelines',
'Test workitem content'
);
// Verify the result
expect(result.text).toContain('Error handled');
expect(result.decision).toBeDefined();
expect(result.decision?.decision).toBe('skip');
expect(result.decision?.reason).toBe('File not found');
expect(result.filesWritten).toHaveLength(0);
expect(result.filesDeleted).toHaveLength(0);
// Verify the function calls
expect(mockGeminiProjectProcessor.getFileContent).toHaveBeenCalledWith('test/non-existent-file.txt');
// Verify the generateContentStream was called
expect(mockGenerateContentStream).toHaveBeenCalledTimes(3);
});
it('should parse decision from text if no makeDecision function call', async () => {
// Set up the mock response with text containing a JSON decision
const streamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{
text: 'Some text before the decision { "decision": "skip", "reason": "No changes needed" } Some text after'
}
]
}
}
]
}
]
};
// Set up the mock implementations
mockGenerateContentStream
.mockResolvedValueOnce(streamResponse)
.mockResolvedValue({ stream: [] }); // Add a default empty stream for any additional calls
// Call the method
const result = await modelStreamService.processModelStream(
'Test guidelines',
'Test workitem content'
);
// Verify the result
expect(result.text).toContain('Some text before the decision');
expect(result.decision).toBeDefined();
expect(result.decision?.decision).toBe('skip');
expect(result.decision?.reason).toBe('No changes needed');
expect(result.filesWritten).toHaveLength(0);
expect(result.filesDeleted).toHaveLength(0);
// Verify the generateContentStream was called
expect(mockGenerateContentStream).toHaveBeenCalledTimes(1);
});
it('should handle dry run mode', async () => {
// Create a new service instance with dryRun set to true
const dryRunService = new ModelStreamService(
mockGeminiProjectProcessor,
mockWorkitem,
undefined, // projectId
undefined, // location
undefined, // model
true // dryRun
);
// Call the method
const result = await dryRunService.processModelStream(
'Test guidelines',
'Test workitem content'
);
// Verify the result
expect(result.text).toContain('DRY RUN');
expect(result.decision).toBeDefined();
expect(result.decision?.decision).toBe('create');
expect(result.decision?.reason).toBe('This is a mock decision for dry run mode');
expect(result.filesWritten).toHaveLength(0);
expect(result.filesDeleted).toHaveLength(0);
// Verify the generateContentStream was not called
expect(mockGenerateContentStream).not.toHaveBeenCalled();
});
});
describe('getModelResponses', () => {
it('should return all model responses', async () => {
// Set up the mock response with text
const streamResponse = {
stream: [
{
candidates: [
{
content: {
parts: [
{ text: 'Response 1' }
]
}
}
]
},
{
candidates: [
{
content: {
parts: [
{ text: 'Response 2' }
]
}
}
]
}
]
};
// Set up the mock implementations
mockGenerateContentStream
.mockResolvedValueOnce(streamResponse)
.mockResolvedValue({ stream: [] }); // Add a default empty stream for any additional calls
// Call the method
await modelStreamService.processModelStream(
'Test guidelines',
'Test workitem content'
);
// Get the model responses
const responses = modelStreamService.getModelResponses();
// Verify the responses
expect(responses).toHaveLength(2);
expect(responses[0]).toBe('Response 1');
expect(responses[1]).toBe('Response 2');
});
});
});

View File

@ -1,6 +1,6 @@
import * as fs from 'fs';
import * as path from 'path';
import { ProjectService } from '../project-service';
import {ProjectService} from '../project-service';
// Mock fs and path modules
jest.mock('fs');
@ -260,7 +260,8 @@ This is a description of the workitem.
(fs.readFileSync as jest.Mock).mockReturnValueOnce(workitemContent);
// Mock fs.writeFileSync
(fs.writeFileSync as jest.Mock).mockImplementationOnce(() => {});
(fs.writeFileSync as jest.Mock).mockImplementationOnce(() => {
});
const updatedWorkitem = await projectService.updateWorkitemWithPullRequestUrl(workitem, pullRequestUrl);
@ -314,7 +315,8 @@ This is a description of the workitem.
(fs.readFileSync as jest.Mock).mockReturnValueOnce(workitemContent);
// Mock fs.writeFileSync
(fs.writeFileSync as jest.Mock).mockImplementationOnce(() => {});
(fs.writeFileSync as jest.Mock).mockImplementationOnce(() => {
});
const updatedWorkitem = await projectService.updateWorkitemWithPullRequestUrl(workitem, pullRequestUrl);

View File

@ -7,55 +7,47 @@ import {ProcessResult, Project, Workitem} from '../types';
import {ProjectService} from './project-service';
import {RepositoryService} from './repository-service';
import {DRY_RUN_SKIP_GEMINI} from '../config';
import {ModelStreamService} from './model-stream-service';
import {GeminiFileSystemService} from 'shared-functions';
export class GeminiProjectProcessor {
private projectService: ProjectService;
private repositoryService: RepositoryService;
private project: Project;
private projectRepoPath: string;
private filesWritten: Map<string, string[]> = new Map(); // Map of workitem name to files written
private currentWorkitem: Workitem | null = null; // Track the current workitem being processed
constructor(
project: Project,
projectRepoPath: string,
mainRepoPath: string
) {
this.project = project;
this.projectRepoPath = projectRepoPath;
constructor() {
this.projectService = new ProjectService();
this.repositoryService = new RepositoryService();
}
/**
* Process the project using Gemini
* @param project Project to process
* @param projectRepoPath Path to the project repository
* @returns Process result
*/
async processProject(): Promise<ProcessResult> {
console.log(`GeminiProjectProcessor: Processing project ${this.project.name}`);
async processProject(project: Project, projectRepoPath: string): Promise<ProcessResult> {
console.log(`GeminiProjectProcessor: Processing project ${project.name}`);
try {
// Find all workitems in the project
const workitems = await this.projectService.findWorkitems(this.project.path);
console.log(`GeminiProjectProcessor: Found ${workitems.length} workitems in project ${this.project.name}`);
const workitems = await this.projectService.findWorkitems(project.path);
console.log(`GeminiProjectProcessor: Found ${workitems.length} workitems in project ${project.name}`);
// Skip if no workitems found
if (workitems.length === 0) {
return {
project: this.project,
project: project,
processedWorkitems: []
};
}
// Read project guidelines
const projectGuidelines = await this.projectService.readProjectGuidelines(this.project.path);
const projectGuidelines = await this.projectService.readProjectGuidelines(project.path);
// Process each workitem
const processedWorkitems = [];
for (const workitem of workitems) {
console.log(`GeminiProjectProcessor: Processing workitem: ${workitem.name}`);
const result = await this.processWorkitem(workitem, projectGuidelines);
const result = await this.processWorkitem(project, projectRepoPath, workitem, projectGuidelines);
processedWorkitems.push({workitem, ...result});
}
@ -65,138 +57,108 @@ export class GeminiProjectProcessor {
if (totalFilesWritten > 0) {
try {
console.log(`Generating git patch for project ${this.project.name} with ${totalFilesWritten} files written`);
gitPatch = await this.repositoryService.generateGitPatch(this.projectRepoPath);
console.log(`Generating git patch for project ${project.name} with ${totalFilesWritten} files written`);
gitPatch = await this.repositoryService.generateGitPatch(projectRepoPath);
} catch (error) {
console.error(`Error generating git patch for project ${this.project.name}:`, error);
console.error(`Error generating git patch for project ${project.name}:`, error);
}
}
return {
project: this.project,
project: project,
processedWorkitems,
gitPatch
};
} catch (error) {
console.error(`Error processing project ${this.project.name}:`, error);
console.error(`Error processing project ${project.name}:`, error);
return {
project: this.project,
project: project,
processedWorkitems: [],
error: error instanceof Error ? error.message : String(error)
};
}
}
/**
* Get the current workitem being processed
* @returns The current workitem or null if no workitem is being processed
*/
getCurrentWorkitem(): Workitem | null {
return this.currentWorkitem;
}
/**
* Process a workitem using Gemini
* @param project Project containing the workitem
* @param projectRepoPath Path to the project repository
* @param workitem Workitem to process
* @param projectGuidelines Project guidelines
* @returns Result of the processing
*/
private async processWorkitem(
project: Project,
projectRepoPath: string,
workitem: Workitem,
projectGuidelines: string
): Promise<{
success: boolean;
error?: string;
status?: 'skipped' | 'updated' | 'created';
filesWritten?: string[]
decision?: 'skip' | 'update' | 'create' | 'delete';
filesWritten?: string[],
filesRemoved?: string[],
}> {
try {
// Set the current workitem
this.currentWorkitem = workitem;
console.log(`GeminiProjectProcessor: Processing workitem: ${workitem.name} (Active: ${workitem.isActive})`);
// Initialize tracking for this workitem
this.filesWritten.set(workitem.name, []);
// Determine initial status based on workitem activity
let status: 'skipped' | 'updated' | 'created' = 'skipped';
// Read workitem content
const workitemContent = fs.readFileSync(workitem.path, 'utf-8');
// Collect all relevant files from the project directory
const relevantFiles = await this.collectRelevantFiles(workitem);
const relevantFiles = await this.collectRelevantFiles(project, projectRepoPath, workitem);
// Let Gemini decide what to do with the workitem
const result = await this.generateFeatureFile(
projectRepoPath,
projectGuidelines,
workitemContent,
workitem.name,
relevantFiles
);
// Track files written and deleted from the ModelStreamService result
const filesWritten = [...result.filesWritten];
const filesDeleted = result.filesDeleted;
// Add deleted files to the tracking with the "deleted:" prefix
filesDeleted.forEach(file => {
filesWritten.push(`deleted:${file}`);
});
// Update the filesWritten map
this.filesWritten.set(workitem.name, filesWritten);
// Use the model's decision to determine the status if available
if (result.decision) {
console.log(`Using model decision: ${result.decision.decision} for workitem ${workitem.name}`);
// Update status based on the model's decision
switch (result.decision.decision) {
case 'create':
status = 'created';
break;
case 'update':
status = 'updated';
break;
case 'delete':
// Keep status as 'skipped' for delete if no files were actually deleted
if (filesDeleted.length > 0) {
status = 'updated'; // We use 'updated' for deletions too
const decision = result.decision?.decision ?? 'skip';
// Check status consistency
switch (decision) {
case "skip":
if (result.filesWritten.length > 0) {
throw new Error(`Skip decision with files written: ${result.filesWritten.join(', ')}`);
}
if (result.filesDeleted.length > 0) {
throw new Error(`Skip decision with files deleted: ${result.filesDeleted.join(', ')}`);
}
break;
case 'skip':
status = 'skipped';
case "create":
if (result.filesWritten.length === 0) {
throw new Error(`Create decision with no files written`);
}
break;
case "update":
if (result.filesWritten.length === 0) {
throw new Error(`Update decision with no files written`);
}
break;
case "delete":
if (result.filesDeleted.length === 0) {
throw new Error(`Delete decision with no files deleted`);
}
break;
}
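// Illustrative: a 'create' decision with result.filesWritten === [] throws here, so
// inconsistent model output surfaces as an error for this workitem rather than a silent success.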
} else {
// Fallback to the old behavior if no decision is available
// If no files were written or deleted, consider it skipped
if (filesWritten.length === 0 && filesDeleted.length === 0) {
status = 'skipped';
} else if (filesWritten.length > 0 || filesDeleted.length > 0) {
// If files were written or deleted, consider it created/updated
status = filesWritten.length > 0 ? 'created' : 'updated';
}
}
// Update the workitem file with implementation log
if (status !== 'skipped') {
if (decision !== 'skip') {
try {
// Determine the log status based on the operation status
const logStatus = status === 'created' ? 'created' :
(status === 'updated' ? 'updated' : 'deleted');
// Get the list of files without the "deleted:" prefix for deleted files
const filesList = filesWritten.map(file =>
file.startsWith('deleted:') ? file.substring(8) : file
);
const logStatus = decision;
// Update the workitem file with implementation log
await this.projectService.updateWorkitemWithImplementationLog(
workitem,
logStatus,
filesList
result.filesWritten,
result.filesDeleted
);
console.log(`GeminiProjectProcessor: Updated workitem file with implementation log for ${workitem.name}`);
@ -205,57 +167,43 @@ export class GeminiProjectProcessor {
}
}
console.log(`GeminiProjectProcessor: Completed processing workitem: ${workitem.name} (Status: ${status}, Files written: ${filesWritten.length})`);
console.log(`GeminiProjectProcessor: Completed processing workitem: ${workitem.name} (Status: ${decision}, Files written: ${result.filesWritten.length})`);
return {
success: true,
status,
filesWritten
decision,
filesWritten: result.filesWritten,
filesRemoved: result.filesDeleted,
};
} catch (error) {
console.error(`Error processing workitem ${workitem.name}:`, error);
return {
success: false,
error: error instanceof Error ? error.message : String(error),
status: 'skipped',
filesWritten: []
};
}
}
/**
* Collect relevant files from the project directory
* @param workitem The workitem being processed
* @param project The project info
* @param projectRepoPath Path to the project repository
* @param workitem The workitem being processed (for logging purposes)
* @returns Object containing file contents
*/
private async collectRelevantFiles(workitem: Workitem): Promise<Record<string, string>> {
private async collectRelevantFiles(project: Project, projectRepoPath: string, workitem: Workitem): Promise<Record<string, string>> {
const relevantFiles: Record<string, string> = {};
try {
// Get the project directory path
const projectDir = path.dirname(path.dirname(workitem.path)); // workitem.path -> workitems/name.md -> project/
// Check for INFO.md
const infoPath = path.join(projectDir, 'INFO.md');
if (fs.existsSync(infoPath)) {
relevantFiles['INFO.md'] = fs.readFileSync(infoPath, 'utf-8');
}
// AI.md is already included in the main prompt
// Check for other potentially relevant files
const potentialFiles = [
'README.md',
'GUIDELINES.md',
'ARCHITECTURE.md',
'IMPLEMENTATION.md'
const guidelinePaths = project.aiGuidelines?.split(',') ?? [
'INFO.md', 'README.md', 'GUIDELINES.md', 'ARCHITECTURE.md', 'IMPLEMENTATION.md'
];
for (const file of potentialFiles) {
const filePath = path.join(projectDir, file);
guidelinePaths
.map(g => g.trim())
.forEach(fileName => {
const filePath = path.join(projectRepoPath, fileName);
if (fs.existsSync(filePath)) {
relevantFiles[file] = fs.readFileSync(filePath, 'utf-8');
}
relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
}
});
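// Illustrative: project.aiGuidelines = 'INFO.md, docs/AI.md' would load those two files
// (paths resolved against projectRepoPath); when unset, the default list above is used.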
console.log(`GeminiProjectProcessor: Collected ${Object.keys(relevantFiles).length} relevant files for workitem ${workitem.name}`);
} catch (error) {
@ -267,6 +215,7 @@ export class GeminiProjectProcessor {
/**
* Generate feature file content using Gemini API
* @param projectRepoPath Path to the project repository
* @param guidelines Project guidelines
* @param workitemContent Workitem content
* @param workitemName Name of the workitem
@ -274,6 +223,7 @@ export class GeminiProjectProcessor {
* @returns Object containing the generated text, parsed decision, and files written/deleted
*/
private async generateFeatureFile(
projectRepoPath: string,
guidelines: string,
workitemContent: string,
workitemName: string,
@ -289,18 +239,7 @@ export class GeminiProjectProcessor {
// If dry run is enabled, return a mock feature file
if (DRY_RUN_SKIP_GEMINI) {
console.log(`[DRY RUN] Skipping Gemini API call for generating feature file for ${workitemName}`);
const mockText = `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)
# Source: ${workitemName}
Feature: ${workitemName} (DRY RUN)
This is a mock feature file generated during dry run.
No actual Gemini API call was made.
Scenario: Mock scenario
Given a dry run is enabled
When the feature file is generated
Then a mock feature file is returned
`;
const mockText = `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)`;
return {
text: mockText,
decision: {
@ -320,23 +259,22 @@ Feature: ${workitemName} (DRY RUN)
additionalContext += `\n--- ${filename} ---\n${content}\n`;
}
// Get the current workitem
const currentWorkitem = this.getCurrentWorkitem();
if (!currentWorkitem) {
throw new Error(`No current workitem set for ${workitemName}`);
}
// Import required configuration
const {GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_LOCATION, GEMINI_MODEL} = require('../config');
// Create a new ModelStreamService for this workitem
const modelStreamService = new ModelStreamService(
this,
currentWorkitem
// Initialize the GeminiFileSystemService directly
const geminiFileSystemService = new GeminiFileSystemService(
GOOGLE_CLOUD_PROJECT_ID,
GOOGLE_CLOUD_LOCATION,
GEMINI_MODEL,
DRY_RUN_SKIP_GEMINI
);
// Process the model stream
const result = await modelStreamService.processModelStream(
const result = await geminiFileSystemService.processModelStream(
guidelines,
workitemContent,
additionalContext
projectRepoPath
);
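// result follows the ModelStreamResult shape exported by shared-functions:
// { text, decision?, modelResponses, filesWritten, filesDeleted }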
return {
@ -347,180 +285,5 @@ Feature: ${workitemName} (DRY RUN)
};
}
/**
* Get the content of a file in the project repository
* @param filePath Path to the file relative to the project repository root
* @returns File content
*/
getFileContent(filePath: string): string {
const fullPath = path.join(this.projectRepoPath, filePath);
if (!fs.existsSync(fullPath)) {
throw new Error(`File not found: ${filePath}`);
}
return fs.readFileSync(fullPath, 'utf-8');
}
/**
* Write content to a file in the project repository
* @param filePath Path to the file relative to the project repository root
* @param content Content to write
* @param workitemName Optional name of the workitem being processed
*/
writeFileContent(filePath: string, content: string, workitemName?: string): void {
const fullPath = path.join(this.projectRepoPath, filePath);
const dirPath = path.dirname(fullPath);
// Ensure directory exists
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, {recursive: true});
}
fs.writeFileSync(fullPath, content, 'utf-8');
// Track the file operation if workitemName is provided
if (workitemName) {
if (!this.filesWritten.has(workitemName)) {
this.filesWritten.set(workitemName, []);
}
this.filesWritten.get(workitemName)!.push(filePath);
console.log(`Tracked file write for workitem ${workitemName}: ${filePath}`);
}
}
/**
* Check if a file exists in the project repository
* @param filePath Path to the file relative to the project repository root
* @returns True if the file exists, false otherwise
*/
fileExists(filePath: string): boolean {
const fullPath = path.join(this.projectRepoPath, filePath);
return fs.existsSync(fullPath);
}
/**
* Delete a file from the project repository
* @param filePath Path to the file relative to the project repository root
* @returns Message indicating success or that the file didn't exist
*/
deleteFile(filePath: string): string {
const fullPath = path.join(this.projectRepoPath, filePath);
if (!fs.existsSync(fullPath)) {
return `File ${filePath} does not exist`;
}
fs.unlinkSync(fullPath);
// Track the file operation using the current workitem
const currentWorkitem = this.getCurrentWorkitem();
if (currentWorkitem) {
const workitemName = currentWorkitem.name;
if (!this.filesWritten.has(workitemName)) {
this.filesWritten.set(workitemName, []);
}
// We're tracking deletions in the same array as writes, but with a "deleted:" prefix
this.filesWritten.get(workitemName)!.push(`deleted:${filePath}`);
console.log(`Tracked file deletion for workitem ${workitemName}: ${filePath}`);
}
return `File ${filePath} deleted successfully`;
}
/**
* List files in a directory in the project repository
* @param dirPath Path to the directory relative to the project repository root
* @returns Array of file names
*/
listFiles(dirPath: string): string[] {
const fullPath = path.join(this.projectRepoPath, dirPath);
if (!fs.existsSync(fullPath)) {
throw new Error(`Directory not found: ${dirPath}`);
}
return fs.readdirSync(fullPath);
}
/**
* Search for a string in project files
* @param searchString String to search for
* @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java")
* @returns Array of matches with file paths and line numbers
*/
grepFiles(searchString: string, filePattern?: string): Array<{ file: string, line: number, content: string }> {
console.log(`Searching for "${searchString}" in project files${filePattern ? ` matching ${filePattern}` : ''}`);
if (!searchString) {
throw new Error('Search string is required');
}
const results: Array<{ file: string, line: number, content: string }> = [];
// Helper function to search in a file
const searchInFile = (filePath: string, relativePath: string) => {
try {
const content = fs.readFileSync(filePath, 'utf-8');
const lines = content.split('\n');
for (let i = 0; i < lines.length; i++) {
if (lines[i].includes(searchString)) {
results.push({
file: relativePath,
line: i + 1, // 1-based line numbers
content: lines[i].trim()
});
}
}
} catch (error) {
console.error(`Error searching in file ${filePath}:`, error);
}
};
// Helper function to recursively search in a directory
const searchInDirectory = (dirPath: string, baseDir: string) => {
try {
const entries = fs.readdirSync(dirPath, {withFileTypes: true});
for (const entry of entries) {
const fullPath = path.join(dirPath, entry.name);
const relativePath = path.relative(baseDir, fullPath);
if (entry.isDirectory()) {
// Skip node_modules and .git directories
if (entry.name !== 'node_modules' && entry.name !== '.git') {
searchInDirectory(fullPath, baseDir);
}
} else if (entry.isFile()) {
// Check if the file matches the pattern
if (!filePattern || this.matchesPattern(entry.name, filePattern)) {
searchInFile(fullPath, relativePath);
}
}
}
} catch (error) {
console.error(`Error searching in directory ${dirPath}:`, error);
}
};
// Start the search from the project repository root
searchInDirectory(this.projectRepoPath, this.projectRepoPath);
console.log(`Found ${results.length} matches for "${searchString}"`);
return results;
}
/**
* Check if a filename matches a simple pattern
* @param filename Filename to check
* @param pattern Pattern to match (supports * wildcard)
* @returns True if the filename matches the pattern
*/
private matchesPattern(filename: string, pattern: string): boolean {
// Convert the pattern to a regex
// Escape special regex characters except *
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex chars
.replace(/\*/g, '.*'); // Convert * to .*
const regex = new RegExp(`^${regexPattern}$`);
return regex.test(filename);
}
}

View File

@ -233,15 +233,11 @@ export class ProcessorService {
const projectRepoPath = await this.repositoryService.cloneProjectRepository(project, credentials);
// Create a GeminiProjectProcessor to handle the project
const geminiProjectProcessor = new GeminiProjectProcessor(
project,
projectRepoPath,
mainRepoPath
);
const geminiProjectProcessor = new GeminiProjectProcessor();
// Let Gemini operate within the project
console.log(`Letting Gemini operate within project: ${project.name}`);
const result = await geminiProjectProcessor.processProject();
const result = await geminiProjectProcessor.processProject(project, projectRepoPath);
// If no workitems were processed or there was an error, return early
if (result.processedWorkitems.length === 0 || result.error) {

View File

@ -3,7 +3,7 @@
*/
import * as fs from 'fs';
import * as path from 'path';
import { ProjectService as SharedProjectService, Project, Workitem } from 'shared-functions';
import {ProjectService as SharedProjectService, Project, Workitem} from 'shared-functions';
export class ProjectService {
private sharedProjectService: SharedProjectService;
@ -182,8 +182,9 @@ export class ProjectService {
*/
async updateWorkitemWithImplementationLog(
workitem: Workitem,
status: 'created' | 'updated' | 'deleted',
files: string[]
status: 'create' | 'update' | 'delete',
filesWritten: string[],
filesRemoved: string[],
): Promise<Workitem> {
if (!fs.existsSync(workitem.path)) {
throw new Error(`Workitem file not found: ${workitem.path}`);
@ -198,24 +199,23 @@ export class ProjectService {
let logMessage = `${timestamp} - `;
switch (status) {
case 'created':
logMessage += `Workitem has been implemented. Created files:\n`;
case 'create':
logMessage += `Workitem has been implemented.\n`;
break;
case 'updated':
logMessage += `Workitem has been updated. Modified files:\n`;
case 'update':
logMessage += `Workitem has been updated.\n`;
break;
case 'deleted':
logMessage += `Workitem has been deleted. Removed files:\n`;
case 'delete':
logMessage += `Workitem has been deleted.\n`;
break;
}
// Add the list of files
if (files.length > 0) {
for (const file of files) {
logMessage += `- ${file}\n`;
for (const file of filesWritten) {
logMessage += `- Created ${file}\n`;
}
} else {
logMessage += `No files were affected.\n`;
for (const file of filesRemoved) {
logMessage += `- Removed ${file}\n`;
}
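// Illustrative log block for a 'create' decision with one file written (path hypothetical):
//   <timestamp> - Workitem has been implemented.
//   - Created src/test/resources/features/test-workitem.feature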
// Add PR URL if available

View File

@ -1,7 +1,13 @@
/**
* Service for handling pull request operations
*/
import {PullRequestService as SharedPullRequestService, Project, RepoCredentials, Workitem, GeminiService} from 'shared-functions';
import {
PullRequestService as SharedPullRequestService,
Project,
RepoCredentials,
Workitem,
GeminiService
} from 'shared-functions';
import {GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_LOCATION, GEMINI_MODEL, DRY_RUN_SKIP_GEMINI} from '../config';
export class PullRequestService {

View File

@ -10,3 +10,4 @@ export { ProjectService } from './services/project-service';
export { RepositoryService } from './services/repository-service';
export { PullRequestService } from './services/pull-request-service';
export { GeminiService } from './services/gemini-service';
export { GeminiFileSystemService } from './services/gemini-file-system-service';
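A minimal consumer sketch (not part of this commit; the project ID, paths and prompt contents are illustrative), mirroring how GeminiProjectProcessor uses the newly exported service:

import {GeminiFileSystemService} from 'shared-functions';

async function runExample(): Promise<void> {
    // Hypothetical values; location and model fall back to the constructor defaults when omitted.
    const service = new GeminiFileSystemService('my-gcp-project', 'europe-west1', 'gemini-1.5-pro', false);
    const result = await service.processModelStream(
        '# Project guidelines (content illustrative)', // guidelines
        '# Workitem: example-workitem',                // additionalContent (e.g. the workitem text)
        '/tmp/project-repo'                            // rootPath exposed to the model's file operations
    );
    console.log(result.decision?.decision, result.filesWritten, result.filesDeleted);
}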

View File

@ -1,6 +1,8 @@
/**
* Service for handling model streams for specific workitems
* Service for handling file system operations with Gemini integration
*/
import * as fs from 'fs';
import * as path from 'path';
import {
FunctionDeclarationSchemaType,
GenerateContentCandidate,
@ -8,22 +10,11 @@ import {
Tool,
VertexAI
} from '@google-cloud/vertexai';
import {Workitem} from '../types';
import {DRY_RUN_SKIP_GEMINI, GEMINI_MODEL, GOOGLE_CLOUD_LOCATION, GOOGLE_CLOUD_PROJECT_ID} from '../config';
import {GeminiProjectProcessor} from './gemini-project-processor';
/**
* Interface for the model response format
*/
interface ModelResponse {
decision: 'create' | 'update' | 'delete' | 'skip';
reason: string;
}
/**
* Interface for function arguments
*/
interface FunctionArgs {
export interface FunctionArgs {
filePath?: string;
content?: string;
dirPath?: string;
@ -34,11 +25,11 @@ interface FunctionArgs {
}
/**
* Interface for streaming response item
* Interface for the model response format
*/
interface StreamResponseItem {
candidates?: GenerateContentCandidate[];
usageMetadata?: any;
export interface ModelResponse {
decision: 'create' | 'update' | 'delete' | 'skip';
reason: string;
}
/**
@ -47,50 +38,30 @@ interface StreamResponseItem {
export interface ModelStreamResult {
text: string;
decision?: ModelResponse;
modelResponses: string[];
filesWritten: string[];
filesDeleted: string[];
}
/**
* Service for handling model streams for specific workitems
* Service for handling file system operations with Gemini integration
*/
export class ModelStreamService {
export class GeminiFileSystemService {
private vertexAI: VertexAI;
private model: string;
private projectId: string;
private location: string;
private fileOperationTools: Tool[];
private geminiProjectProcessor: GeminiProjectProcessor;
private workitem: Workitem;
// State tracking
private filesWritten: string[] = [];
private filesDeleted: string[] = [];
private modelResponses: string[] = [];
private decision?: ModelResponse;
/**
* Create a new ModelStreamService instance
* @param geminiProjectProcessor GeminiProjectProcessor to handle function calls
* @param workitem Workitem being processed
* @param projectId Google Cloud project ID (defaults to GOOGLE_CLOUD_PROJECT_ID from config)
* @param location Google Cloud location (defaults to GOOGLE_CLOUD_LOCATION from config)
* @param model Gemini model to use (defaults to GEMINI_MODEL from config)
* Create a new GeminiFileSystemService instance
* @param projectId Google Cloud project ID
* @param location Google Cloud location
* @param model Gemini model to use
* @param dryRun Whether to skip the Gemini call and return a mock result
*/
constructor(
geminiProjectProcessor: GeminiProjectProcessor,
workitem: Workitem,
projectId?: string,
location?: string,
model?: string,
private dryRun?: boolean
private projectId: string,
private location: string = 'us-central1',
private model: string = 'gemini-1.5-pro',
private dryRun: boolean = false
) {
this.geminiProjectProcessor = geminiProjectProcessor;
this.workitem = workitem;
this.projectId = projectId || GOOGLE_CLOUD_PROJECT_ID;
this.location = location || GOOGLE_CLOUD_LOCATION;
this.model = model || GEMINI_MODEL;
if (!this.projectId) {
throw new Error('Google Cloud Project ID is required');
}
@ -101,7 +72,7 @@ export class ModelStreamService {
location: this.location,
});
// Define file operation functions and decision function
// Define file operation functions
this.fileOperationTools = [
{
function_declarations: [
@ -222,29 +193,188 @@ export class ModelStreamService {
}
/**
* Process a model stream for a workitem
* @param guidelines Project guidelines
* @param workitemContent Workitem content
* @param additionalContext Optional additional context from relevant files
* Get the content of a file
* @param rootPath Root path of the repository
* @param filePath Path to the file relative to the root path
* @returns File content
*/
getFileContent(rootPath: string, filePath: string): string {
const fullPath = path.join(rootPath, filePath);
if (!fs.existsSync(fullPath)) {
throw new Error(`File not found: ${filePath}`);
}
return fs.readFileSync(fullPath, 'utf-8');
}
/**
* Write content to a file
* @param rootPath Root path of the repository
* @param filePath Path to the file relative to the root path
* @param content Content to write
*/
writeFileContent(rootPath: string, filePath: string, content: string): void {
const fullPath = path.join(rootPath, filePath);
const dirPath = path.dirname(fullPath);
// Ensure directory exists
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, {recursive: true});
}
fs.writeFileSync(fullPath, content, 'utf-8');
}
/**
* Check if a file exists
* @param rootPath Root path of the repository
* @param filePath Path to the file relative to the root path
* @returns True if the file exists, false otherwise
*/
fileExists(rootPath: string, filePath: string): boolean {
const fullPath = path.join(rootPath, filePath);
return fs.existsSync(fullPath);
}
/**
* Delete a file
* @param rootPath Root path of the repository
* @param filePath Path to the file relative to the root path
* @returns Message indicating success or that the file didn't exist
*/
deleteFile(rootPath: string, filePath: string): string {
const fullPath = path.join(rootPath, filePath);
if (!fs.existsSync(fullPath)) {
return `File ${filePath} does not exist`;
}
fs.unlinkSync(fullPath);
return `File ${filePath} deleted successfully`;
}
/**
* List files in a directory
* @param rootPath Root path of the repository
* @param dirPath Path to the directory relative to the root path
* @returns Array of file names
*/
listFiles(rootPath: string, dirPath: string): string[] {
const fullPath = path.join(rootPath, dirPath);
if (!fs.existsSync(fullPath)) {
throw new Error(`Directory not found: ${dirPath}`);
}
return fs.readdirSync(fullPath);
}
/**
* Search for a string in files
* @param rootPath Root path to search from
* @param searchString String to search for
* @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java")
* @returns Array of matches with file paths and line numbers
*/
grepFiles(rootPath: string, searchString: string, filePattern?: string): Array<{
file: string,
line: number,
content: string
}> {
console.log(`Searching for "${searchString}" in files${filePattern ? ` matching ${filePattern}` : ''}`);
if (!searchString) {
throw new Error('Search string is required');
}
const results: Array<{ file: string, line: number, content: string }> = [];
// Helper function to search in a file
const searchInFile = (filePath: string, relativePath: string) => {
try {
const content = fs.readFileSync(filePath, 'utf-8');
const lines = content.split('\n');
for (let i = 0; i < lines.length; i++) {
if (lines[i].includes(searchString)) {
results.push({
file: relativePath,
line: i + 1, // 1-based line numbers
content: lines[i].trim()
});
}
}
} catch (error) {
console.error(`Error searching in file ${filePath}:`, error);
}
};
// Helper function to recursively search in a directory
const searchInDirectory = (dirPath: string, baseDir: string) => {
try {
const entries = fs.readdirSync(dirPath, {withFileTypes: true});
for (const entry of entries) {
const fullPath = path.join(dirPath, entry.name);
const relativePath = path.relative(baseDir, fullPath);
if (entry.isDirectory()) {
// Skip node_modules and .git directories
if (entry.name !== 'node_modules' && entry.name !== '.git') {
searchInDirectory(fullPath, baseDir);
}
} else if (entry.isFile()) {
// Check if the file matches the pattern
if (!filePattern || this.matchesPattern(entry.name, filePattern)) {
searchInFile(fullPath, relativePath);
}
}
}
} catch (error) {
console.error(`Error searching in directory ${dirPath}:`, error);
}
};
// Start the search from the root path
searchInDirectory(rootPath, rootPath);
console.log(`Found ${results.length} matches for "${searchString}"`);
return results;
}
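// Illustrative: grepFiles(rootPath, 'TODO', '*.ts') returns entries shaped like
// { file: 'src/config.ts', line: 12, content: '// TODO: ...' } (file and line values hypothetical).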
/**
* Check if a filename matches a simple pattern
* @param filename Filename to check
* @param pattern Pattern to match (supports * wildcard)
* @returns True if the filename matches the pattern
*/
private matchesPattern(filename: string, pattern: string): boolean {
// Convert the pattern to a regex
// Escape special regex characters except *
const regexPattern = pattern
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex chars
.replace(/\*/g, '.*'); // Convert * to .*
const regex = new RegExp(`^${regexPattern}$`);
return regex.test(filename);
}
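// Examples (illustrative): matchesPattern('user-service.ts', '*.ts') -> true;
// matchesPattern('UserService.java', 'src/*.java') -> false, since the pattern is
// matched against the file name only, not its directory path.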
/**
* Process a model stream with file system operations
* @param guidelines Guidelines content to include in the prompt
* @param additionalContent Additional content to include in the prompt
* @param rootPath Root path of the filesystem exposed to Gemini
* @returns Object containing the generated text, parsed decision, and files written/deleted
*/
async processModelStream(
guidelines: string,
workitemContent: string,
additionalContext: string = ''
additionalContent: string,
rootPath: string
): Promise<ModelStreamResult> {
const currentDate = new Date().toISOString();
// If dry run is enabled, return a mock result
if (this.dryRun || DRY_RUN_SKIP_GEMINI) {
console.log(`[DRY RUN] Skipping Gemini API call for processing workitem ${this.workitem.name}`);
const mockText = `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)`;
if (this.dryRun) {
console.log(`[DRY RUN] Skipping Gemini API call for processing`);
const mockText = `# Generated on ${currentDate} (DRY RUN)`;
return {
text: mockText,
decision: {
decision: 'create',
reason: 'This is a mock decision for dry run mode'
},
modelResponses: [],
filesWritten: [],
filesDeleted: []
};
@ -254,15 +384,7 @@ export class ModelStreamService {
const prompt = `
${guidelines}
Workitem:
${workitemContent}
You are tasked with implementing the workitem in the project repository according to the guidelines provided.
You have full control over how to implement the workitem, and you can decide what actions to take.
Include the following comment at the top of any generated files:
# Generated by prompts-to-test-spec on ${currentDate}
# Source: ${this.workitem.name}
${additionalContent}
You have access to the following function calls to help you understand the project structure and create implementations:
- getFileContent(filePath): Get the content of a file in the project repository
@ -273,13 +395,9 @@ You have access to the following function calls to help you understand the proje
- deleteFile(filePath): Delete a file from the project repository
- makeDecision(decision, reason): State your decision about implementing the workitem. Decision must be one of: 'create', 'update', 'delete', 'skip'
You can decide whether to create, update, delete or skip implementing this workitem based on your analysis.
IMPORTANT!!: First use the function calls above to actually implement the workitem. Make all necessary function calls to fully implement the workitem.
After you have implemented the workitem using function calls, use the makeDecision function to state your final decision with a reason.
${additionalContext ? `\nAdditional context from project files:${additionalContext}` : ''}
`;
// Instantiate the model with our file operation tools
@ -304,6 +422,11 @@ ${additionalContext ? `\nAdditional context from project files:${additionalConte
// Generate content in a streaming fashion
const streamingResp = await generativeModel.generateContentStream(request);
// Track state within the method scope
const filesWritten: string[] = [];
const filesDeleted: string[] = [];
const modelResponses: string[] = [];
let decision: ModelResponse | undefined;
let finalResponse = '';
let pendingFunctionCalls = [];
@ -338,7 +461,7 @@ ${additionalContext ? `\nAdditional context from project files:${additionalConte
} else if (textContent) {
// If there's text, append it to the final response
finalResponse += textContent;
this.modelResponses.push(textContent);
modelResponses.push(textContent);
}
}
@ -361,31 +484,31 @@ ${additionalContext ? `\nAdditional context from project files:${additionalConte
// Execute the function
switch (functionName) {
case 'getFileContent':
functionResponse = this.geminiProjectProcessor.getFileContent(functionArgs.filePath!);
functionResponse = this.getFileContent(rootPath, functionArgs.filePath!);
break;
case 'writeFileContent':
this.geminiProjectProcessor.writeFileContent(functionArgs.filePath!, functionArgs.content!, this.workitem.name);
this.writeFileContent(rootPath, functionArgs.filePath!, functionArgs.content!);
functionResponse = `File ${functionArgs.filePath} written successfully`;
// Track the file written
this.filesWritten.push(functionArgs.filePath!);
filesWritten.push(functionArgs.filePath!);
break;
case 'fileExists':
functionResponse = this.geminiProjectProcessor.fileExists(functionArgs.filePath!);
functionResponse = this.fileExists(rootPath, functionArgs.filePath!);
break;
case 'listFiles':
functionResponse = this.geminiProjectProcessor.listFiles(functionArgs.dirPath!);
functionResponse = this.listFiles(rootPath, functionArgs.dirPath!);
break;
case 'grepFiles':
functionResponse = this.geminiProjectProcessor.grepFiles(functionArgs.searchString!, functionArgs.filePattern);
functionResponse = this.grepFiles(rootPath, functionArgs.searchString!, functionArgs.filePattern);
break;
case 'deleteFile':
functionResponse = this.geminiProjectProcessor.deleteFile(functionArgs.filePath!);
functionResponse = this.deleteFile(rootPath, functionArgs.filePath!);
// Track the file deleted
this.filesDeleted.push(functionArgs.filePath!);
filesDeleted.push(functionArgs.filePath!);
break;
case 'makeDecision':
// Store the decision
this.decision = {
decision = {
decision: functionArgs.decision!,
reason: functionArgs.reason!
};
@ -403,7 +526,94 @@ ${additionalContext ? `\nAdditional context from project files:${additionalConte
};
// Update the request with the function call and response
currentRequest = {
currentRequest = this.createNextRequest(currentRequest, functionCall, functionResponseObj);
// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);
// Process the next streaming response
const nextResult = await this.processNextStreamingResponse(nextStreamingResp);
// Update state
finalResponse += nextResult.textContent;
if (nextResult.textContent) {
modelResponses.push(nextResult.textContent);
}
if (nextResult.functionCall) {
pendingFunctionCalls.push(nextResult.functionCall);
}
} catch (error) {
console.error(`Error executing function ${functionName}:`, error);
// Create an error response object
const errorResponseObj = {
name: functionName,
response: {error: error instanceof Error ? error.message : String(error)}
};
// Update the request with the function call and error response
currentRequest = this.createNextRequest(currentRequest, functionCall, errorResponseObj, true);
// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);
// Process the next streaming response
const nextResult = await this.processNextStreamingResponse(nextStreamingResp, true);
// Update state
finalResponse += nextResult.textContent;
if (nextResult.textContent) {
modelResponses.push(nextResult.textContent);
}
if (nextResult.functionCall) {
pendingFunctionCalls.push(nextResult.functionCall);
}
}
}
}
console.log(`Model stream processing completed`);
console.log(`Files written: ${filesWritten.length}, Files deleted: ${filesDeleted.length}`);
// If no explicit decision was made using the makeDecision function, try to parse it from the text
if (!decision) {
try {
// Try to parse a JSON decision from the text
const jsonMatch = finalResponse.match(/\{[\s\S]*"decision"[\s\S]*\}/);
if (jsonMatch) {
decision = JSON.parse(jsonMatch[0]) as ModelResponse;
console.log(`Parsed decision from text: ${decision.decision}, reason: ${decision.reason}`);
}
} catch (error) {
console.error(`Error parsing decision from text: ${error}`);
}
}
return {
text: finalResponse,
decision: decision,
modelResponses: modelResponses,
filesWritten: filesWritten,
filesDeleted: filesDeleted
};
}
/**
* Create the next request with function call and response
* @param currentRequest Current request
* @param functionCall Function call
* @param functionResponseObj Function response object
* @param isError Whether the response is an error
* @returns Next request
*/
private createNextRequest(
currentRequest: GenerateContentRequest,
functionCall: any,
functionResponseObj: any,
isError: boolean = false
): GenerateContentRequest {
return {
contents: [
...currentRequest.contents,
{
@ -425,143 +635,45 @@ ${additionalContext ? `\nAdditional context from project files:${additionalConte
],
tools: this.fileOperationTools,
};
// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);
// Process the next streaming response
for await (const nextItem of nextStreamingResp.stream) {
console.log(`[DEBUG] Processing next stream item`);
// Avoid stringifying the entire item which can be too complex
if (nextItem.candidates && nextItem.candidates.length > 0) {
console.log(`[DEBUG] Next item has ${nextItem.candidates.length} candidates`);
}
let textContent = '';
let nextFunctionCall = null;
// Iterate over every part in the response
for (const part of nextItem.candidates?.[0]?.content?.parts || []) {
if (part.functionCall) {
nextFunctionCall = part.functionCall;
console.log(`[DEBUG] Function call detected in next stream: ${nextFunctionCall.name}`);
break;
} else if (part.text) {
textContent += part.text;
console.log(`[DEBUG] Text content detected in next stream: ${textContent.substring(0, 100)}${textContent.length > 100 ? '...' : ''}`);
}
}
if (nextFunctionCall) {
// Add to pending function calls to be processed
pendingFunctionCalls.push(nextFunctionCall);
} else if (textContent) {
finalResponse += textContent;
this.modelResponses.push(textContent);
}
}
} catch (error) {
console.error(`Error executing function ${functionName}:`, error);
// Create an error response object
const errorResponseObj = {
name: functionName,
response: {error: error instanceof Error ? error.message : String(error)}
};
// Update the request with the function call and error response
currentRequest = {
contents: [
...currentRequest.contents,
{
role: 'ASSISTANT',
parts: [
{
functionCall: functionCall
}
]
},
{
role: 'USER',
parts: [
{
functionResponse: errorResponseObj
}
]
}
],
tools: this.fileOperationTools,
};
// Generate the next response
const nextStreamingResp = await generativeModel.generateContentStream(currentRequest);
// Process the next streaming response
for await (const nextItem of nextStreamingResp.stream) {
console.log(`[DEBUG] Processing next stream item after error`);
// Avoid stringifying the entire item which can be too complex
if (nextItem.candidates && nextItem.candidates.length > 0) {
console.log(`[DEBUG] Next item after error has ${nextItem.candidates.length} candidates`);
}
let textContent = '';
let nextFunctionCall = null;
// Iterate over every part in the response
for (const part of nextItem.candidates?.[0]?.content?.parts || []) {
if (part.functionCall) {
nextFunctionCall = part.functionCall;
console.log(`[DEBUG] Function call detected in next stream after error: ${nextFunctionCall.name}`);
break;
} else if (part.text) {
textContent += part.text;
console.log(`[DEBUG] Text content detected in next stream after error: ${textContent.substring(0, 100)}${textContent.length > 100 ? '...' : ''}`);
}
}
if (nextFunctionCall) {
// Add to pending function calls to be processed
pendingFunctionCalls.push(nextFunctionCall);
} else if (textContent) {
finalResponse += textContent;
this.modelResponses.push(textContent);
}
}
}
}
}
console.log(`Model stream processing completed for ${this.workitem.name}`);
console.log(`Files written: ${this.filesWritten.length}, Files deleted: ${this.filesDeleted.length}`);
// If no explicit decision was made using the makeDecision function, try to parse it from the text
if (!this.decision) {
try {
// Try to parse a JSON decision from the text
const jsonMatch = finalResponse.match(/\{[\s\S]*"decision"[\s\S]*\}/);
if (jsonMatch) {
this.decision = JSON.parse(jsonMatch[0]) as ModelResponse;
console.log(`Parsed decision from text: ${this.decision.decision}, reason: ${this.decision.reason}`);
}
} catch (error) {
console.error(`Error parsing decision from text: ${error}`);
}
}
return {
text: finalResponse,
decision: this.decision,
filesWritten: this.filesWritten,
filesDeleted: this.filesDeleted
};
}
/**
* Get all model responses collected during processing
* @returns Array of model response texts
* Process the next streaming response
* @param nextStreamingResp Next streaming response
* @param isAfterError Whether this is after an error
* @returns Object containing text content and function call
*/
getModelResponses(): string[] {
return this.modelResponses;
private async processNextStreamingResponse(
nextStreamingResp: any,
isAfterError: boolean = false
): Promise<{
textContent: string,
functionCall: any
}> {
let textContent = '';
let functionCall = null;
for await (const nextItem of nextStreamingResp.stream) {
console.log(`[DEBUG] Processing next stream item${isAfterError ? ' after error' : ''}`);
// Avoid stringifying the entire item which can be too complex
if (nextItem.candidates && nextItem.candidates.length > 0) {
console.log(`[DEBUG] Next item${isAfterError ? ' after error' : ''} has ${nextItem.candidates.length} candidates`);
}
// Iterate over every part in the response
for (const part of nextItem.candidates?.[0]?.content?.parts || []) {
if (part.functionCall) {
functionCall = part.functionCall;
console.log(`[DEBUG] Function call detected in next stream${isAfterError ? ' after error' : ''}: ${functionCall.name}`);
break;
} else if (part.text) {
textContent += part.text;
console.log(`[DEBUG] Text content detected in next stream${isAfterError ? ' after error' : ''}: ${textContent.substring(0, 100)}${textContent.length > 100 ? '...' : ''}`);
}
}
}
return {textContent, functionCall};
}
}