WIP
This commit is contained in:
parent
7eea12cce0
commit
6172a58166
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
.output.txt
|
@ -5,11 +5,19 @@ A Google Cloud Function that processes workitem prompts and generates test speci
|
|||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
This function:
|
This function:
|
||||||
1. Clones the main repository containing prompts
|
1. Clones the main repository containing prompts (read-only access)
|
||||||
2. Iterates over each project in the prompts/ directory
|
2. Iterates over each project in the prompts/ directory
|
||||||
3. Clones the project repository
|
3. For each project:
|
||||||
4. Uses the Gemini API to apply guidelines from the project's AI.md file
|
- Clones the project repository (read-write access)
|
||||||
5. Creates a pull request in the project repository with the generated test specifications
|
- Lets Gemini operate within the project directory
|
||||||
|
- Gemini iterates over all prompts (workitems)
|
||||||
|
- Gemini decides whether an operation is required
|
||||||
|
- Gemini implements the workitem in the target project repo or removes implementation for inactive workitems
|
||||||
|
- Gemini outputs whether the work item was skipped/created/updated/deleted
|
||||||
|
- Gemini can call functions to interact with the project repository (read, write, delete, search files, etc.)
|
||||||
|
- Tracks all file operations performed by Gemini
|
||||||
|
- Updates workitem prompts with implementation logs
|
||||||
|
4. Creates a pull request in the project repository with the generated test specifications and git patch
|
||||||
|
|
||||||
## Prerequisites
|
## Prerequisites
|
||||||
|
|
||||||
@ -55,6 +63,7 @@ The function requires several environment variables to be set:
|
|||||||
- `GOOGLE_CLOUD_PROJECT_ID`: Your Google Cloud project ID
|
- `GOOGLE_CLOUD_PROJECT_ID`: Your Google Cloud project ID
|
||||||
- `GOOGLE_CLOUD_LOCATION`: Google Cloud region (default: us-central1)
|
- `GOOGLE_CLOUD_LOCATION`: Google Cloud region (default: us-central1)
|
||||||
- `GEMINI_MODEL`: Gemini model to use (default: gemini-1.5-pro)
|
- `GEMINI_MODEL`: Gemini model to use (default: gemini-1.5-pro)
|
||||||
|
- Note: The model must support function calling (gemini-1.5-pro and later versions support this feature)
|
||||||
|
|
||||||
### Function Configuration
|
### Function Configuration
|
||||||
- `DEBUG`: Set to 'true' to enable debug logging
|
- `DEBUG`: Set to 'true' to enable debug logging
|
||||||
@ -114,6 +123,12 @@ Run tests in watch mode:
|
|||||||
npm run test:watch
|
npm run test:watch
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The test suite includes tests for:
|
||||||
|
- HTTP response formatting
|
||||||
|
- Project service functionality
|
||||||
|
- Processor service operations
|
||||||
|
- Gemini service function calling capabilities
|
||||||
|
|
||||||
## Deployment
|
## Deployment
|
||||||
|
|
||||||
### HTTP Trigger
|
### HTTP Trigger
|
||||||
@ -139,6 +154,18 @@ The function is organized into several services:
|
|||||||
- **RepositoryService**: Handles Git operations like cloning repositories and creating branches
|
- **RepositoryService**: Handles Git operations like cloning repositories and creating branches
|
||||||
- **ProjectService**: Finds and processes projects and workitems
|
- **ProjectService**: Finds and processes projects and workitems
|
||||||
- **GeminiService**: Interacts with the Gemini API to generate test specifications
|
- **GeminiService**: Interacts with the Gemini API to generate test specifications
|
||||||
|
- Supports function calling to allow Gemini to interact with the project repository
|
||||||
|
- Defines file operation functions that Gemini can call (read, write, delete, list, search)
|
||||||
|
- Handles function calls and responses in a chat session
|
||||||
|
- Includes relevant files from the prompts/ directory in the prompt to Gemini
|
||||||
|
- Doesn't rely on hardcoded prompts, letting Gemini decide what to do based on workitem content
|
||||||
|
- **GeminiProjectProcessor**: Handles Gemini operations within a project directory
|
||||||
|
- Provides file access API for Gemini to use via function calling
|
||||||
|
- Implements methods for reading, writing, deleting, listing, and searching files
|
||||||
|
- Passes itself to GeminiService to handle function calls
|
||||||
|
- Tracks all file operations performed by Gemini
|
||||||
|
- Updates workitem prompts with implementation logs (created/updated/deleted files)
|
||||||
|
- Generates git patches of changes for pull request descriptions
|
||||||
- **PullRequestService**: Creates pull requests in project repositories
|
- **PullRequestService**: Creates pull requests in project repositories
|
||||||
- **ProcessorService**: Orchestrates the entire process
|
- **ProcessorService**: Orchestrates the entire process
|
||||||
|
|
||||||
@ -146,15 +173,16 @@ The function is organized into several services:
|
|||||||
|
|
||||||
```
|
```
|
||||||
src/
|
src/
|
||||||
├── index.ts # Main entry point
|
├── index.ts # Main entry point
|
||||||
├── types.ts # Type definitions
|
├── types.ts # Type definitions
|
||||||
└── services/ # Service modules
|
└── services/ # Service modules
|
||||||
├── repository-service.ts # Git operations
|
├── repository-service.ts # Git operations
|
||||||
├── project-service.ts # Project and workitem processing
|
├── project-service.ts # Project and workitem processing
|
||||||
├── gemini-service.ts # Gemini API integration
|
├── gemini-service.ts # Gemini API integration
|
||||||
├── pull-request-service.ts # Pull request creation
|
├── gemini-project-processor.ts # Gemini operations within a project
|
||||||
├── processor-service.ts # Process orchestration
|
├── pull-request-service.ts # Pull request creation
|
||||||
└── __tests__/ # Unit tests
|
├── processor-service.ts # Process orchestration
|
||||||
|
└── __tests__/ # Unit tests
|
||||||
```
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
@ -59,6 +59,8 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
|
|||||||
const workitemsProcessed = result.processedWorkitems.length;
|
const workitemsProcessed = result.processedWorkitems.length;
|
||||||
const workitemsSkipped = result.processedWorkitems.filter(w => !w.workitem.isActive).length;
|
const workitemsSkipped = result.processedWorkitems.filter(w => !w.workitem.isActive).length;
|
||||||
const workitemsUpdated = result.processedWorkitems.filter(w => w.success).length;
|
const workitemsUpdated = result.processedWorkitems.filter(w => w.success).length;
|
||||||
|
const workitemsCreated = result.processedWorkitems.filter(w => w.success && w.status === 'created').length;
|
||||||
|
const filesWritten = result.processedWorkitems.reduce((sum, w) => sum + (w.filesWritten?.length || 0), 0);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
name: result.project.name,
|
name: result.project.name,
|
||||||
@ -67,7 +69,10 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
|
|||||||
workitemsProcessed,
|
workitemsProcessed,
|
||||||
workitemsSkipped,
|
workitemsSkipped,
|
||||||
workitemsUpdated,
|
workitemsUpdated,
|
||||||
pullRequestUrl: result.pullRequestUrl
|
workitemsCreated,
|
||||||
|
filesWritten,
|
||||||
|
pullRequestUrl: result.pullRequestUrl,
|
||||||
|
gitPatch: result.gitPatch
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -0,0 +1,435 @@
|
|||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import { GeminiService } from '../gemini-service';
|
||||||
|
import { GeminiProjectProcessor } from '../gemini-project-processor';
|
||||||
|
import { Project } from '../../types';
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
jest.mock('@google-cloud/vertexai');
|
||||||
|
jest.mock('fs');
|
||||||
|
jest.mock('path');
|
||||||
|
jest.mock('../../config', () => ({
|
||||||
|
GOOGLE_CLOUD_PROJECT_ID: 'mock-project-id',
|
||||||
|
GOOGLE_CLOUD_LOCATION: 'mock-location',
|
||||||
|
GEMINI_MODEL: 'mock-model',
|
||||||
|
DRY_RUN_SKIP_GEMINI: false
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe('GeminiService', () => {
|
||||||
|
let geminiService: GeminiService;
|
||||||
|
let mockGeminiProjectProcessor: jest.Mocked<GeminiProjectProcessor>;
|
||||||
|
let mockVertexAI: any;
|
||||||
|
let mockGenerativeModel: any;
|
||||||
|
let mockChat: any;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
|
|
||||||
|
// Create a new instance of GeminiService
|
||||||
|
geminiService = new GeminiService();
|
||||||
|
|
||||||
|
// Mock GeminiProjectProcessor
|
||||||
|
mockGeminiProjectProcessor = {
|
||||||
|
getFileContent: jest.fn(),
|
||||||
|
writeFileContent: jest.fn(),
|
||||||
|
fileExists: jest.fn(),
|
||||||
|
listFiles: jest.fn(),
|
||||||
|
grepFiles: jest.fn(),
|
||||||
|
deleteFile: jest.fn(),
|
||||||
|
getCurrentWorkitem: jest.fn().mockReturnValue(null),
|
||||||
|
project: {} as Project,
|
||||||
|
projectRepoPath: '/mock/project/repo',
|
||||||
|
mainRepoPath: '/mock/main/repo',
|
||||||
|
processProject: jest.fn(),
|
||||||
|
processWorkitem: jest.fn(),
|
||||||
|
generateFeatureFile: jest.fn(),
|
||||||
|
collectRelevantFiles: jest.fn(),
|
||||||
|
matchesPattern: jest.fn()
|
||||||
|
} as unknown as jest.Mocked<GeminiProjectProcessor>;
|
||||||
|
|
||||||
|
// Mock VertexAI and its methods
|
||||||
|
mockChat = {
|
||||||
|
sendMessage: jest.fn()
|
||||||
|
};
|
||||||
|
|
||||||
|
mockGenerativeModel = {
|
||||||
|
startChat: jest.fn().mockReturnValue(mockChat)
|
||||||
|
};
|
||||||
|
|
||||||
|
mockVertexAI = {
|
||||||
|
getGenerativeModel: jest.fn().mockReturnValue(mockGenerativeModel)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Replace the VertexAI instance in GeminiService with our mock
|
||||||
|
(geminiService as any).vertexAI = mockVertexAI;
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('processFunctionCalls', () => {
|
||||||
|
it('should process getFileContent function call correctly', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'getFileContent',
|
||||||
|
args: JSON.stringify({ filePath: 'test/file.txt' })
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after function call')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations
|
||||||
|
mockGeminiProjectProcessor.getFileContent.mockReturnValue('File content');
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after function call');
|
||||||
|
|
||||||
|
// Verify the function was called with correct arguments
|
||||||
|
expect(mockGeminiProjectProcessor.getFileContent).toHaveBeenCalledWith('test/file.txt');
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'getFileContent',
|
||||||
|
response: { result: JSON.stringify('File content') }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should process writeFileContent function call correctly', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'writeFileContent',
|
||||||
|
args: JSON.stringify({
|
||||||
|
filePath: 'test/file.txt',
|
||||||
|
content: 'New content'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after function call')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations
|
||||||
|
mockGeminiProjectProcessor.writeFileContent.mockImplementation(() => {});
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after function call');
|
||||||
|
|
||||||
|
// Verify the function was called with correct arguments
|
||||||
|
expect(mockGeminiProjectProcessor.writeFileContent).toHaveBeenCalledWith(
|
||||||
|
'test/file.txt',
|
||||||
|
'New content',
|
||||||
|
undefined
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'writeFileContent',
|
||||||
|
response: { result: JSON.stringify('File test/file.txt written successfully') }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should process fileExists function call correctly', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'fileExists',
|
||||||
|
args: JSON.stringify({ filePath: 'test/file.txt' })
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after function call')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations
|
||||||
|
mockGeminiProjectProcessor.fileExists.mockReturnValue(true);
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after function call');
|
||||||
|
|
||||||
|
// Verify the function was called with correct arguments
|
||||||
|
expect(mockGeminiProjectProcessor.fileExists).toHaveBeenCalledWith('test/file.txt');
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'fileExists',
|
||||||
|
response: { result: JSON.stringify(true) }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should process listFiles function call correctly', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'listFiles',
|
||||||
|
args: JSON.stringify({ dirPath: 'test/dir' })
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after function call')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations
|
||||||
|
mockGeminiProjectProcessor.listFiles.mockReturnValue(['file1.txt', 'file2.txt']);
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after function call');
|
||||||
|
|
||||||
|
// Verify the function was called with correct arguments
|
||||||
|
expect(mockGeminiProjectProcessor.listFiles).toHaveBeenCalledWith('test/dir');
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'listFiles',
|
||||||
|
response: { result: JSON.stringify(['file1.txt', 'file2.txt']) }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should process grepFiles function call correctly', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'grepFiles',
|
||||||
|
args: JSON.stringify({
|
||||||
|
searchString: 'test',
|
||||||
|
filePattern: '*.txt'
|
||||||
|
})
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after function call')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations
|
||||||
|
const grepResults = [
|
||||||
|
{ file: 'file1.txt', line: 10, content: 'test content' },
|
||||||
|
{ file: 'file2.txt', line: 20, content: 'more test content' }
|
||||||
|
];
|
||||||
|
mockGeminiProjectProcessor.grepFiles.mockReturnValue(grepResults);
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after function call');
|
||||||
|
|
||||||
|
// Verify the function was called with correct arguments
|
||||||
|
expect(mockGeminiProjectProcessor.grepFiles).toHaveBeenCalledWith('test', '*.txt');
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'grepFiles',
|
||||||
|
response: { result: JSON.stringify(grepResults) }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle errors in function calls', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'getFileContent',
|
||||||
|
args: JSON.stringify({ filePath: 'test/file.txt' })
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after error')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations to throw an error
|
||||||
|
mockGeminiProjectProcessor.getFileContent.mockImplementation(() => {
|
||||||
|
throw new Error('File not found');
|
||||||
|
});
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after error');
|
||||||
|
|
||||||
|
// Verify the function was called with correct arguments
|
||||||
|
expect(mockGeminiProjectProcessor.getFileContent).toHaveBeenCalledWith('test/file.txt');
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'getFileContent',
|
||||||
|
response: { error: 'File not found' }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle unknown function calls', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'unknownFunction',
|
||||||
|
args: JSON.stringify({ param: 'value' })
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Initial response')
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockNextResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Final response after error')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Setup mock implementations
|
||||||
|
mockChat.sendMessage.mockResolvedValue(mockNextResult);
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Final response after error');
|
||||||
|
|
||||||
|
// Verify the chat.sendMessage was called with correct arguments
|
||||||
|
expect(mockChat.sendMessage).toHaveBeenCalledWith({
|
||||||
|
functionResponse: {
|
||||||
|
name: 'unknownFunction',
|
||||||
|
response: { error: 'Unknown function: unknownFunction' }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return text response if no function calls', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [],
|
||||||
|
text: jest.fn().mockReturnValue('Text response')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Call the method
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
mockGeminiProjectProcessor
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Text response');
|
||||||
|
|
||||||
|
// Verify no function calls were made
|
||||||
|
expect(mockGeminiProjectProcessor.getFileContent).not.toHaveBeenCalled();
|
||||||
|
expect(mockGeminiProjectProcessor.writeFileContent).not.toHaveBeenCalled();
|
||||||
|
expect(mockGeminiProjectProcessor.fileExists).not.toHaveBeenCalled();
|
||||||
|
expect(mockGeminiProjectProcessor.listFiles).not.toHaveBeenCalled();
|
||||||
|
expect(mockGeminiProjectProcessor.grepFiles).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Verify no chat messages were sent
|
||||||
|
expect(mockChat.sendMessage).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return text response if no geminiProjectProcessor provided', async () => {
|
||||||
|
// Setup mock responses
|
||||||
|
const mockResult = {
|
||||||
|
functionCalls: [
|
||||||
|
{
|
||||||
|
name: 'getFileContent',
|
||||||
|
args: JSON.stringify({ filePath: 'test/file.txt' })
|
||||||
|
}
|
||||||
|
],
|
||||||
|
text: jest.fn().mockReturnValue('Text response')
|
||||||
|
};
|
||||||
|
|
||||||
|
// Call the method without providing geminiProjectProcessor
|
||||||
|
const result = await (geminiService as any).processFunctionCalls(
|
||||||
|
mockResult,
|
||||||
|
mockChat,
|
||||||
|
undefined
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify the result
|
||||||
|
expect(result).toBe('Text response');
|
||||||
|
|
||||||
|
// Verify no chat messages were sent
|
||||||
|
expect(mockChat.sendMessage).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@ -77,6 +77,8 @@ describe('formatHttpResponse', () => {
|
|||||||
workitemsProcessed: 2,
|
workitemsProcessed: 2,
|
||||||
workitemsSkipped: 1,
|
workitemsSkipped: 1,
|
||||||
workitemsUpdated: 2,
|
workitemsUpdated: 2,
|
||||||
|
workitemsCreated: 0,
|
||||||
|
filesWritten: 0,
|
||||||
pullRequestUrl: 'https://github.com/org/project1/pull/123'
|
pullRequestUrl: 'https://github.com/org/project1/pull/123'
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -85,7 +87,9 @@ describe('formatHttpResponse', () => {
|
|||||||
error: 'Failed to process project',
|
error: 'Failed to process project',
|
||||||
workitemsProcessed: 1,
|
workitemsProcessed: 1,
|
||||||
workitemsSkipped: 0,
|
workitemsSkipped: 0,
|
||||||
workitemsUpdated: 0
|
workitemsUpdated: 0,
|
||||||
|
workitemsCreated: 0,
|
||||||
|
filesWritten: 0
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
@ -129,7 +133,9 @@ describe('formatHttpResponse', () => {
|
|||||||
success: true,
|
success: true,
|
||||||
workitemsProcessed: 0,
|
workitemsProcessed: 0,
|
||||||
workitemsSkipped: 0,
|
workitemsSkipped: 0,
|
||||||
workitemsUpdated: 0
|
workitemsUpdated: 0,
|
||||||
|
workitemsCreated: 0,
|
||||||
|
filesWritten: 0
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
});
|
});
|
||||||
|
@ -64,10 +64,11 @@ describe('ProcessorService', () => {
|
|||||||
{
|
{
|
||||||
project,
|
project,
|
||||||
processedWorkitems: [
|
processedWorkitems: [
|
||||||
{ workitem: workitem1, success: true },
|
{ workitem: workitem1, success: true, status: 'updated', filesWritten: [] },
|
||||||
{ workitem: workitem2, success: true }
|
{ workitem: workitem2, success: true, status: 'updated', filesWritten: [] }
|
||||||
],
|
],
|
||||||
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
|
pullRequestUrl: 'https://github.com/org/test-project/pull/123',
|
||||||
|
gitPatch: 'mock-git-patch'
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
|
|
||||||
@ -139,8 +140,8 @@ describe('ProcessorService', () => {
|
|||||||
{
|
{
|
||||||
project,
|
project,
|
||||||
processedWorkitems: [
|
processedWorkitems: [
|
||||||
{ workitem: activeWorkitem, success: true },
|
{ workitem: activeWorkitem, success: true, status: 'updated', filesWritten: [] },
|
||||||
{ workitem: deactivatedWorkitem, success: true }
|
{ workitem: deactivatedWorkitem, success: true, status: 'skipped', filesWritten: [] }
|
||||||
],
|
],
|
||||||
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
|
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,513 @@
|
|||||||
|
/**
|
||||||
|
* Service for handling Gemini operations within a project
|
||||||
|
*/
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import { Project, Workitem, ProcessResult } from '../types';
|
||||||
|
import { GeminiService } from './gemini-service';
|
||||||
|
import { ProjectService } from './project-service';
|
||||||
|
import { RepositoryService } from './repository-service';
|
||||||
|
import { DRY_RUN_SKIP_GEMINI } from '../config';
|
||||||
|
|
||||||
|
export class GeminiProjectProcessor {
|
||||||
|
private geminiService: GeminiService;
|
||||||
|
private projectService: ProjectService;
|
||||||
|
private repositoryService: RepositoryService;
|
||||||
|
private project: Project;
|
||||||
|
private projectRepoPath: string;
|
||||||
|
private mainRepoPath: string;
|
||||||
|
private filesWritten: Map<string, string[]> = new Map(); // Map of workitem name to files written
|
||||||
|
private currentWorkitem: Workitem | null = null; // Track the current workitem being processed
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
project: Project,
|
||||||
|
projectRepoPath: string,
|
||||||
|
mainRepoPath: string
|
||||||
|
) {
|
||||||
|
this.project = project;
|
||||||
|
this.projectRepoPath = projectRepoPath;
|
||||||
|
this.mainRepoPath = mainRepoPath;
|
||||||
|
this.geminiService = new GeminiService();
|
||||||
|
this.projectService = new ProjectService();
|
||||||
|
this.repositoryService = new RepositoryService();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process the project using Gemini
|
||||||
|
* @returns Process result
|
||||||
|
*/
|
||||||
|
async processProject(): Promise<ProcessResult> {
|
||||||
|
console.log(`GeminiProjectProcessor: Processing project ${this.project.name}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Find all workitems in the project
|
||||||
|
const workitems = await this.projectService.findWorkitems(this.project.path);
|
||||||
|
console.log(`GeminiProjectProcessor: Found ${workitems.length} workitems in project ${this.project.name}`);
|
||||||
|
|
||||||
|
// Skip if no workitems found
|
||||||
|
if (workitems.length === 0) {
|
||||||
|
return {
|
||||||
|
project: this.project,
|
||||||
|
processedWorkitems: []
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read project guidelines
|
||||||
|
const projectGuidelines = await this.projectService.readProjectGuidelines(this.project.path);
|
||||||
|
|
||||||
|
// Process each workitem
|
||||||
|
const processedWorkitems = [];
|
||||||
|
for (const workitem of workitems) {
|
||||||
|
console.log(`GeminiProjectProcessor: Processing workitem: ${workitem.name}`);
|
||||||
|
const result = await this.processWorkitem(workitem, projectGuidelines);
|
||||||
|
processedWorkitems.push({ workitem, ...result });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate git patch if any files were written
|
||||||
|
let gitPatch: string | undefined = undefined;
|
||||||
|
const totalFilesWritten = processedWorkitems.reduce((total, item) => total + (item.filesWritten?.length || 0), 0);
|
||||||
|
|
||||||
|
if (totalFilesWritten > 0) {
|
||||||
|
try {
|
||||||
|
console.log(`Generating git patch for project ${this.project.name} with ${totalFilesWritten} files written`);
|
||||||
|
gitPatch = await this.repositoryService.generateGitPatch(this.projectRepoPath);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error generating git patch for project ${this.project.name}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
project: this.project,
|
||||||
|
processedWorkitems,
|
||||||
|
gitPatch
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error processing project ${this.project.name}:`, error);
|
||||||
|
return {
|
||||||
|
project: this.project,
|
||||||
|
processedWorkitems: [],
|
||||||
|
error: error instanceof Error ? error.message : String(error)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current workitem being processed
|
||||||
|
* @returns The current workitem or null if no workitem is being processed
|
||||||
|
*/
|
||||||
|
getCurrentWorkitem(): Workitem | null {
|
||||||
|
return this.currentWorkitem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process a workitem using Gemini
|
||||||
|
* @param workitem Workitem to process
|
||||||
|
* @param projectGuidelines Project guidelines
|
||||||
|
* @returns Result of the processing
|
||||||
|
*/
|
||||||
|
private async processWorkitem(
|
||||||
|
workitem: Workitem,
|
||||||
|
projectGuidelines: string
|
||||||
|
): Promise<{ success: boolean; error?: string; status?: 'skipped' | 'updated' | 'created'; filesWritten?: string[] }> {
|
||||||
|
try {
|
||||||
|
// Set the current workitem
|
||||||
|
this.currentWorkitem = workitem;
|
||||||
|
console.log(`GeminiProjectProcessor: Processing workitem: ${workitem.name} (Active: ${workitem.isActive})`);
|
||||||
|
|
||||||
|
// Initialize tracking for this workitem
|
||||||
|
this.filesWritten.set(workitem.name, []);
|
||||||
|
|
||||||
|
// Determine initial status based on workitem activity
|
||||||
|
let status: 'skipped' | 'updated' | 'created' = 'skipped';
|
||||||
|
|
||||||
|
// If workitem is not active, skip processing
|
||||||
|
if (!workitem.isActive) {
|
||||||
|
console.log(`GeminiProjectProcessor: Skipping inactive workitem: ${workitem.name}`);
|
||||||
|
|
||||||
|
// If the feature file exists, it should be deleted
|
||||||
|
const featureFileName = `${workitem.name}.feature`;
|
||||||
|
const featurePath = path.join(this.projectRepoPath, 'nitro-it', 'src', 'test', 'resources', 'workitems', featureFileName);
|
||||||
|
|
||||||
|
if (fs.existsSync(featurePath)) {
|
||||||
|
fs.unlinkSync(featurePath);
|
||||||
|
console.log(`GeminiProjectProcessor: Deleted feature file for inactive workitem: ${featurePath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: true, status: 'skipped', filesWritten: [] };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read workitem content
|
||||||
|
const workitemContent = fs.readFileSync(workitem.path, 'utf-8');
|
||||||
|
|
||||||
|
// Collect all relevant files from the project directory
|
||||||
|
const relevantFiles = await this.collectRelevantFiles(workitem);
|
||||||
|
|
||||||
|
// Check if the feature file already exists to determine if this is an update or creation
|
||||||
|
const featureFileName = `${workitem.name}.feature`;
|
||||||
|
const featurePath = path.join(this.projectRepoPath, 'nitro-it', 'src', 'test', 'resources', 'workitems', featureFileName);
|
||||||
|
status = fs.existsSync(featurePath) ? 'updated' : 'created';
|
||||||
|
|
||||||
|
// Let Gemini decide what to do with the workitem
|
||||||
|
const result = await this.generateFeatureFile(
|
||||||
|
projectGuidelines,
|
||||||
|
workitemContent,
|
||||||
|
workitem.name,
|
||||||
|
relevantFiles
|
||||||
|
);
|
||||||
|
|
||||||
|
// Gemini will handle the file operations through function calls
|
||||||
|
// No need to manually create or delete files here
|
||||||
|
|
||||||
|
// Get the list of files written for this workitem
|
||||||
|
const filesWritten = this.filesWritten.get(workitem.name) || [];
|
||||||
|
|
||||||
|
// If no files were written, but the workitem is active, consider it skipped
|
||||||
|
if (filesWritten.length === 0) {
|
||||||
|
status = 'skipped';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the workitem file with implementation log
|
||||||
|
if (status !== 'skipped') {
|
||||||
|
try {
|
||||||
|
// Determine the log status based on the operation status
|
||||||
|
const logStatus = status === 'created' ? 'created' :
|
||||||
|
(status === 'updated' ? 'updated' : 'deleted');
|
||||||
|
|
||||||
|
// Get the list of files without the "deleted:" prefix for deleted files
|
||||||
|
const filesList = filesWritten.map(file =>
|
||||||
|
file.startsWith('deleted:') ? file.substring(8) : file
|
||||||
|
);
|
||||||
|
|
||||||
|
// Update the workitem file with implementation log
|
||||||
|
await this.projectService.updateWorkitemWithImplementationLog(
|
||||||
|
workitem,
|
||||||
|
logStatus,
|
||||||
|
filesList
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(`GeminiProjectProcessor: Updated workitem file with implementation log for ${workitem.name}`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error updating workitem file with implementation log: ${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`GeminiProjectProcessor: Completed processing workitem: ${workitem.name} (Status: ${status}, Files written: ${filesWritten.length})`);
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
status,
|
||||||
|
filesWritten
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error processing workitem ${workitem.name}:`, error);
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
status: 'skipped',
|
||||||
|
filesWritten: []
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Collect relevant files from the project directory
|
||||||
|
* @param workitem The workitem being processed
|
||||||
|
* @returns Object containing file contents
|
||||||
|
*/
|
||||||
|
private async collectRelevantFiles(workitem: Workitem): Promise<Record<string, string>> {
|
||||||
|
const relevantFiles: Record<string, string> = {};
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get the project directory path
|
||||||
|
const projectDir = path.dirname(path.dirname(workitem.path)); // workitem.path -> workitems/name.md -> project/
|
||||||
|
|
||||||
|
// Check for INFO.md
|
||||||
|
const infoPath = path.join(projectDir, 'INFO.md');
|
||||||
|
if (fs.existsSync(infoPath)) {
|
||||||
|
relevantFiles['INFO.md'] = fs.readFileSync(infoPath, 'utf-8');
|
||||||
|
}
|
||||||
|
|
||||||
|
// AI.md is already included in the main prompt
|
||||||
|
|
||||||
|
// Check for other potentially relevant files
|
||||||
|
const potentialFiles = [
|
||||||
|
'README.md',
|
||||||
|
'GUIDELINES.md',
|
||||||
|
'ARCHITECTURE.md',
|
||||||
|
'IMPLEMENTATION.md'
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const file of potentialFiles) {
|
||||||
|
const filePath = path.join(projectDir, file);
|
||||||
|
if (fs.existsSync(filePath)) {
|
||||||
|
relevantFiles[file] = fs.readFileSync(filePath, 'utf-8');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for existing feature file if it exists
|
||||||
|
const featureFileName = `${workitem.name}.feature`;
|
||||||
|
const featurePath = path.join(this.projectRepoPath, 'nitro-it', 'src', 'test', 'resources', 'workitems', featureFileName);
|
||||||
|
|
||||||
|
if (fs.existsSync(featurePath)) {
|
||||||
|
relevantFiles['existing_feature.feature'] = fs.readFileSync(featurePath, 'utf-8');
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`GeminiProjectProcessor: Collected ${Object.keys(relevantFiles).length} relevant files for workitem ${workitem.name}`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error collecting relevant files for workitem ${workitem.name}:`, error);
|
||||||
|
}
|
||||||
|
|
||||||
|
return relevantFiles;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate feature file content using Gemini API
|
||||||
|
* @param guidelines Project guidelines
|
||||||
|
* @param workitemContent Workitem content
|
||||||
|
* @param workitemName Name of the workitem
|
||||||
|
* @param relevantFiles Additional relevant files to include in the prompt
|
||||||
|
* @returns Generated feature file content
|
||||||
|
*/
|
||||||
|
private async generateFeatureFile(
|
||||||
|
guidelines: string,
|
||||||
|
workitemContent: string,
|
||||||
|
workitemName: string,
|
||||||
|
relevantFiles: Record<string, string> = {}
|
||||||
|
): Promise<string> {
|
||||||
|
const currentDate = new Date().toISOString();
|
||||||
|
|
||||||
|
// If dry run is enabled, return a mock feature file
|
||||||
|
if (DRY_RUN_SKIP_GEMINI) {
|
||||||
|
console.log(`[DRY RUN] Skipping Gemini API call for generating feature file for ${workitemName}`);
|
||||||
|
return `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)
|
||||||
|
# Source: ${workitemName}
|
||||||
|
|
||||||
|
Feature: ${workitemName} (DRY RUN)
|
||||||
|
This is a mock feature file generated during dry run.
|
||||||
|
No actual Gemini API call was made.
|
||||||
|
|
||||||
|
Scenario: Mock scenario
|
||||||
|
Given a dry run is enabled
|
||||||
|
When the feature file is generated
|
||||||
|
Then a mock feature file is returned
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Using function calling to generate feature file for ${workitemName}`);
|
||||||
|
|
||||||
|
// Prepare additional context from relevant files
|
||||||
|
let additionalContext = '';
|
||||||
|
for (const [filename, content] of Object.entries(relevantFiles)) {
|
||||||
|
additionalContext += `\n--- ${filename} ---\n${content}\n`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pass this instance as the processor to handle function calls
|
||||||
|
return await this.geminiService.generateFeatureFile(
|
||||||
|
guidelines,
|
||||||
|
workitemContent,
|
||||||
|
workitemName,
|
||||||
|
this, // Pass the GeminiProjectProcessor instance to handle function calls
|
||||||
|
additionalContext // Pass additional context from relevant files
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the content of a file in the project repository
|
||||||
|
* @param filePath Path to the file relative to the project repository root
|
||||||
|
* @returns File content
|
||||||
|
*/
|
||||||
|
getFileContent(filePath: string): string {
|
||||||
|
const fullPath = path.join(this.projectRepoPath, filePath);
|
||||||
|
if (!fs.existsSync(fullPath)) {
|
||||||
|
throw new Error(`File not found: ${filePath}`);
|
||||||
|
}
|
||||||
|
return fs.readFileSync(fullPath, 'utf-8');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write content to a file in the project repository
|
||||||
|
* @param filePath Path to the file relative to the project repository root
|
||||||
|
* @param content Content to write
|
||||||
|
* @param workitemName Optional name of the workitem being processed
|
||||||
|
*/
|
||||||
|
writeFileContent(filePath: string, content: string, workitemName?: string): void {
|
||||||
|
const fullPath = path.join(this.projectRepoPath, filePath);
|
||||||
|
const dirPath = path.dirname(fullPath);
|
||||||
|
|
||||||
|
// Ensure directory exists
|
||||||
|
if (!fs.existsSync(dirPath)) {
|
||||||
|
fs.mkdirSync(dirPath, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.writeFileSync(fullPath, content, 'utf-8');
|
||||||
|
|
||||||
|
// Track the file operation if workitemName is provided
|
||||||
|
if (workitemName) {
|
||||||
|
if (!this.filesWritten.has(workitemName)) {
|
||||||
|
this.filesWritten.set(workitemName, []);
|
||||||
|
}
|
||||||
|
this.filesWritten.get(workitemName)!.push(filePath);
|
||||||
|
console.log(`Tracked file write for workitem ${workitemName}: ${filePath}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file exists in the project repository
|
||||||
|
* @param filePath Path to the file relative to the project repository root
|
||||||
|
* @returns True if the file exists, false otherwise
|
||||||
|
*/
|
||||||
|
fileExists(filePath: string): boolean {
|
||||||
|
const fullPath = path.join(this.projectRepoPath, filePath);
|
||||||
|
return fs.existsSync(fullPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a file from the project repository
|
||||||
|
* @param filePath Path to the file relative to the project repository root
|
||||||
|
* @returns Message indicating success or that the file didn't exist
|
||||||
|
*/
|
||||||
|
deleteFile(filePath: string): string {
|
||||||
|
const fullPath = path.join(this.projectRepoPath, filePath);
|
||||||
|
|
||||||
|
if (!fs.existsSync(fullPath)) {
|
||||||
|
return `File ${filePath} does not exist`;
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.unlinkSync(fullPath);
|
||||||
|
|
||||||
|
// Track the file operation using the current workitem
|
||||||
|
const currentWorkitem = this.getCurrentWorkitem();
|
||||||
|
if (currentWorkitem) {
|
||||||
|
const workitemName = currentWorkitem.name;
|
||||||
|
if (!this.filesWritten.has(workitemName)) {
|
||||||
|
this.filesWritten.set(workitemName, []);
|
||||||
|
}
|
||||||
|
// We're tracking deletions in the same array as writes, but with a "deleted:" prefix
|
||||||
|
this.filesWritten.get(workitemName)!.push(`deleted:${filePath}`);
|
||||||
|
console.log(`Tracked file deletion for workitem ${workitemName}: ${filePath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return `File ${filePath} deleted successfully`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the name of the current workitem being processed
|
||||||
|
* This is a helper method to track file operations
|
||||||
|
* @returns The name of the current workitem or undefined
|
||||||
|
*/
|
||||||
|
private getCurrentWorkitemName(): string | undefined {
|
||||||
|
// This is a simple implementation that assumes the last part of the stack trace
|
||||||
|
// will contain the workitem name from the processWorkitem method
|
||||||
|
const stack = new Error().stack;
|
||||||
|
if (!stack) return undefined;
|
||||||
|
|
||||||
|
const lines = stack.split('\n');
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.includes('processWorkitem')) {
|
||||||
|
const match = /processWorkitem\s*\(\s*(\w+)/.exec(line);
|
||||||
|
if (match && match[1]) {
|
||||||
|
return match[1];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List files in a directory in the project repository
|
||||||
|
* @param dirPath Path to the directory relative to the project repository root
|
||||||
|
* @returns Array of file names
|
||||||
|
*/
|
||||||
|
listFiles(dirPath: string): string[] {
|
||||||
|
const fullPath = path.join(this.projectRepoPath, dirPath);
|
||||||
|
if (!fs.existsSync(fullPath)) {
|
||||||
|
throw new Error(`Directory not found: ${dirPath}`);
|
||||||
|
}
|
||||||
|
return fs.readdirSync(fullPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Search for a string in project files
|
||||||
|
* @param searchString String to search for
|
||||||
|
* @param filePattern Optional file pattern to limit the search (e.g., "*.ts", "src/*.java")
|
||||||
|
* @returns Array of matches with file paths and line numbers
|
||||||
|
*/
|
||||||
|
grepFiles(searchString: string, filePattern?: string): Array<{file: string, line: number, content: string}> {
|
||||||
|
console.log(`Searching for "${searchString}" in project files${filePattern ? ` matching ${filePattern}` : ''}`);
|
||||||
|
|
||||||
|
if (!searchString) {
|
||||||
|
throw new Error('Search string is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
const results: Array<{file: string, line: number, content: string}> = [];
|
||||||
|
|
||||||
|
// Helper function to search in a file
|
||||||
|
const searchInFile = (filePath: string, relativePath: string) => {
|
||||||
|
try {
|
||||||
|
const content = fs.readFileSync(filePath, 'utf-8');
|
||||||
|
const lines = content.split('\n');
|
||||||
|
|
||||||
|
for (let i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].includes(searchString)) {
|
||||||
|
results.push({
|
||||||
|
file: relativePath,
|
||||||
|
line: i + 1, // 1-based line numbers
|
||||||
|
content: lines[i].trim()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error searching in file ${filePath}:`, error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Helper function to recursively search in a directory
|
||||||
|
const searchInDirectory = (dirPath: string, baseDir: string) => {
|
||||||
|
try {
|
||||||
|
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
const fullPath = path.join(dirPath, entry.name);
|
||||||
|
const relativePath = path.relative(baseDir, fullPath);
|
||||||
|
|
||||||
|
if (entry.isDirectory()) {
|
||||||
|
// Skip node_modules and .git directories
|
||||||
|
if (entry.name !== 'node_modules' && entry.name !== '.git') {
|
||||||
|
searchInDirectory(fullPath, baseDir);
|
||||||
|
}
|
||||||
|
} else if (entry.isFile()) {
|
||||||
|
// Check if the file matches the pattern
|
||||||
|
if (!filePattern || this.matchesPattern(entry.name, filePattern)) {
|
||||||
|
searchInFile(fullPath, relativePath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error searching in directory ${dirPath}:`, error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Start the search from the project repository root
|
||||||
|
searchInDirectory(this.projectRepoPath, this.projectRepoPath);
|
||||||
|
|
||||||
|
console.log(`Found ${results.length} matches for "${searchString}"`);
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a filename matches a simple pattern
|
||||||
|
* @param filename Filename to check
|
||||||
|
* @param pattern Pattern to match (supports * wildcard)
|
||||||
|
* @returns True if the filename matches the pattern
|
||||||
|
*/
|
||||||
|
private matchesPattern(filename: string, pattern: string): boolean {
|
||||||
|
// Convert the pattern to a regex
|
||||||
|
// Escape special regex characters except *
|
||||||
|
const regexPattern = pattern
|
||||||
|
.replace(/[.+?^${}()|[\]\\]/g, '\\$&') // Escape special regex chars
|
||||||
|
.replace(/\*/g, '.*'); // Convert * to .*
|
||||||
|
|
||||||
|
const regex = new RegExp(`^${regexPattern}$`);
|
||||||
|
return regex.test(filename);
|
||||||
|
}
|
||||||
|
}
|
@ -1,7 +1,7 @@
|
|||||||
/**
|
/**
|
||||||
* Service for handling Gemini API operations
|
* Service for handling Gemini API operations
|
||||||
*/
|
*/
|
||||||
import { VertexAI } from '@google-cloud/vertexai';
|
import { VertexAI, FunctionDeclaration, Tool, FunctionDeclarationSchemaType } from '@google-cloud/vertexai';
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import { Project, Workitem } from '../types';
|
import { Project, Workitem } from '../types';
|
||||||
@ -17,6 +17,7 @@ export class GeminiService {
|
|||||||
private model: string;
|
private model: string;
|
||||||
private projectId: string;
|
private projectId: string;
|
||||||
private location: string;
|
private location: string;
|
||||||
|
private fileOperationTools: Tool[];
|
||||||
|
|
||||||
constructor(projectId?: string, location?: string, model?: string) {
|
constructor(projectId?: string, location?: string, model?: string) {
|
||||||
this.projectId = projectId || GOOGLE_CLOUD_PROJECT_ID;
|
this.projectId = projectId || GOOGLE_CLOUD_PROJECT_ID;
|
||||||
@ -31,6 +32,106 @@ export class GeminiService {
|
|||||||
project: this.projectId,
|
project: this.projectId,
|
||||||
location: this.location,
|
location: this.location,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Define file operation functions
|
||||||
|
this.fileOperationTools = [
|
||||||
|
{
|
||||||
|
function_declarations: [
|
||||||
|
{
|
||||||
|
name: "getFileContent",
|
||||||
|
description: "Get the content of a file in the project repository",
|
||||||
|
parameters: {
|
||||||
|
type: FunctionDeclarationSchemaType.OBJECT,
|
||||||
|
properties: {
|
||||||
|
filePath: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Path to the file relative to the project repository root"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
required: ["filePath"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "writeFileContent",
|
||||||
|
description: "Write content to a file in the project repository",
|
||||||
|
parameters: {
|
||||||
|
type: FunctionDeclarationSchemaType.OBJECT,
|
||||||
|
properties: {
|
||||||
|
filePath: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Path to the file relative to the project repository root"
|
||||||
|
},
|
||||||
|
content: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Content to write to the file"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
required: ["filePath", "content"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "fileExists",
|
||||||
|
description: "Check if a file exists in the project repository",
|
||||||
|
parameters: {
|
||||||
|
type: FunctionDeclarationSchemaType.OBJECT,
|
||||||
|
properties: {
|
||||||
|
filePath: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Path to the file relative to the project repository root"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
required: ["filePath"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "listFiles",
|
||||||
|
description: "List files in a directory in the project repository",
|
||||||
|
parameters: {
|
||||||
|
type: FunctionDeclarationSchemaType.OBJECT,
|
||||||
|
properties: {
|
||||||
|
dirPath: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Path to the directory relative to the project repository root"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
required: ["dirPath"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "grepFiles",
|
||||||
|
description: "Search for a string in project files",
|
||||||
|
parameters: {
|
||||||
|
type: FunctionDeclarationSchemaType.OBJECT,
|
||||||
|
properties: {
|
||||||
|
searchString: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "String to search for in project files"
|
||||||
|
},
|
||||||
|
filePattern: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Optional file pattern to limit the search (e.g., '*.ts', 'src/*.java')"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
required: ["searchString"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "deleteFile",
|
||||||
|
description: "Delete a file from the project repository",
|
||||||
|
parameters: {
|
||||||
|
type: FunctionDeclarationSchemaType.OBJECT,
|
||||||
|
properties: {
|
||||||
|
filePath: {
|
||||||
|
type: FunctionDeclarationSchemaType.STRING,
|
||||||
|
description: "Path to the file relative to the project repository root"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
required: ["filePath"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -118,12 +219,16 @@ export class GeminiService {
|
|||||||
* @param guidelines Project guidelines
|
* @param guidelines Project guidelines
|
||||||
* @param workitemContent Workitem content
|
* @param workitemContent Workitem content
|
||||||
* @param workitemName Name of the workitem
|
* @param workitemName Name of the workitem
|
||||||
|
* @param geminiProjectProcessor Optional GeminiProjectProcessor to handle function calls
|
||||||
|
* @param additionalContext Optional additional context from relevant files
|
||||||
* @returns Generated feature file content
|
* @returns Generated feature file content
|
||||||
*/
|
*/
|
||||||
private async generateFeatureFile(
|
async generateFeatureFile(
|
||||||
guidelines: string,
|
guidelines: string,
|
||||||
workitemContent: string,
|
workitemContent: string,
|
||||||
workitemName: string
|
workitemName: string,
|
||||||
|
    geminiProjectProcessor?: any,
    additionalContext: string = ''
  ): Promise<string> {
    const currentDate = new Date().toISOString();

@ -146,28 +251,125 @@ Feature: ${workitemName} (DRY RUN)

    const generativeModel = this.vertexAI.getGenerativeModel({
      model: this.model,
      tools: geminiProjectProcessor ? this.fileOperationTools : undefined,
    });

    // Send the AI.md file and additional context to Gemini without hardcoded instructions
    const prompt = `
${guidelines}

Workitem:
${workitemContent}

You are tasked with implementing the workitem in the project repository according to the guidelines provided.
You have full control over how to implement the workitem, and you can decide what actions to take.

Include the following comment at the top of any generated files:
# Generated by prompts-to-test-spec on ${currentDate}
# Source: ${workitemName}

You have access to file operations to help you understand the project structure and create better implementations:
- getFileContent(filePath): Get the content of a file in the project repository
- writeFileContent(filePath, content): Write content to a file in the project repository
- fileExists(filePath): Check if a file exists in the project repository
- listFiles(dirPath): List files in a directory in the project repository
- grepFiles(searchString, filePattern): Search for a string in project files, optionally filtered by a file pattern
- deleteFile(filePath): Delete a file from the project repository

You can decide whether to create, update, or skip implementing this workitem based on your analysis.
Include the decision in your response.
${additionalContext ? `\nAdditional context from project files:${additionalContext}` : ''}
`;

    // Start the chat session
    const chat = generativeModel.startChat();

    // Send the initial message
    const result = await chat.sendMessage(prompt);

    // Process function calls if needed
    let finalResponse = await this.processFunctionCalls(result, chat, geminiProjectProcessor);

    return finalResponse;
  }

  /**
   * Process function calls in the Gemini response
   * @param result The result from Gemini
   * @param chat The chat session
   * @param geminiProjectProcessor The GeminiProjectProcessor to handle function calls
   * @returns The final generated text
   */
  private async processFunctionCalls(result: any, chat: any, geminiProjectProcessor?: any): Promise<string> {
    // Check if there are function calls in the response
    if (!result.functionCalls || result.functionCalls.length === 0 || !geminiProjectProcessor) {
      // No function calls, return the text response
      return result.text();
    }

    console.log(`Processing ${result.functionCalls.length} function calls from Gemini`);

    // Process each function call
    for (const functionCall of result.functionCalls) {
      const functionName = functionCall.name;
      const functionArgs = JSON.parse(functionCall.args);

      console.log(`Executing function: ${functionName} with args:`, functionArgs);

      let functionResponse;
      try {
        // Execute the function using the GeminiProjectProcessor
        switch (functionName) {
          case 'getFileContent':
            functionResponse = geminiProjectProcessor.getFileContent(functionArgs.filePath);
            break;
          case 'writeFileContent':
            // Get the current workitem name from the context
            const currentWorkitem = geminiProjectProcessor.getCurrentWorkitem();
            geminiProjectProcessor.writeFileContent(functionArgs.filePath, functionArgs.content, currentWorkitem?.name);
            functionResponse = `File ${functionArgs.filePath} written successfully`;
            break;
          case 'fileExists':
            functionResponse = geminiProjectProcessor.fileExists(functionArgs.filePath);
            break;
          case 'listFiles':
            functionResponse = geminiProjectProcessor.listFiles(functionArgs.dirPath);
            break;
          case 'grepFiles':
            functionResponse = geminiProjectProcessor.grepFiles(functionArgs.searchString, functionArgs.filePattern);
            break;
          case 'deleteFile':
            functionResponse = geminiProjectProcessor.deleteFile(functionArgs.filePath);
            break;
          default:
            throw new Error(`Unknown function: ${functionName}`);
        }

        // Send the function response back to Gemini
        const functionResponseObj = { functionResponse: { name: functionName, response: { result: JSON.stringify(functionResponse) } } };
        const nextResult = await chat.sendMessage(functionResponseObj);

        // Recursively process any additional function calls
        return this.processFunctionCalls(nextResult, chat, geminiProjectProcessor);
      } catch (error) {
        console.error(`Error executing function ${functionName}:`, error);

        // Send the error back to Gemini
        const errorResponseObj = {
          functionResponse: {
            name: functionName,
            response: { error: error instanceof Error ? error.message : String(error) }
          }
        };
        const nextResult = await chat.sendMessage(errorResponseObj);

        // Recursively process any additional function calls
        return this.processFunctionCalls(nextResult, chat, geminiProjectProcessor);
      }
    }

    // Return the final text response
    return result.text();
  }
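The `fileOperationTools` property used above is declared elsewhere in the service and is not part of this hunk. As a rough sketch, one such declaration for the `@google-cloud/vertexai` SDK could look like the following; the names, descriptions, and schemas here are illustrative assumptions, not the committed declarations:

```typescript
import { FunctionDeclarationSchemaType, Tool } from '@google-cloud/vertexai';

// Sketch only: one declaration per file operation offered to Gemini.
// The real fileOperationTools in gemini-service.ts may differ in detail.
const fileOperationTools: Tool[] = [
  {
    functionDeclarations: [
      {
        name: 'getFileContent',
        description: 'Get the content of a file in the project repository',
        parameters: {
          type: FunctionDeclarationSchemaType.OBJECT,
          properties: {
            filePath: {
              type: FunctionDeclarationSchemaType.STRING,
              description: 'Path relative to the project repository root'
            }
          },
          required: ['filePath']
        }
      }
      // writeFileContent, fileExists, listFiles, grepFiles and deleteFile
      // would follow the same pattern with their own parameter schemas.
    ]
  }
];
```

Passing declarations like these through `tools` on `getGenerativeModel` is what lets the chat session above return function calls instead of plain text.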
@ -253,9 +455,7 @@ Create a clear, professional pull request description that:
The pull request description should be ready to use without further editing.
`;

    const result = await generativeModel.generateContent(prompt);

    const response = await result.response;
    const generatedText = response.candidates[0]?.content?.parts[0]?.text || '';
@ -7,6 +7,7 @@ import { RepositoryService } from './repository-service';
import { ProjectService } from './project-service';
import { GeminiService } from './gemini-service';
import { PullRequestService } from './pull-request-service';
import { GeminiProjectProcessor } from './gemini-project-processor';
import {
  MAIN_REPO_URL,
  validateConfig,
@ -135,7 +136,7 @@ export class ProcessorService {
    for (const project of projects) {
      try {
        console.log(`Starting processing of project: ${project.name}`);
        const result = await this.processProject(project, mainRepoPath);
        console.log(`Finished processing project: ${project.name}`);
        results.push(result);
      } catch (error) {
@ -218,23 +219,12 @@ export class ProcessorService {
  /**
   * Process a single project
   * @param project Project information
   * @param mainRepoPath Path to the main repository
   * @returns Process result
   */
  async processProject(project: Project, mainRepoPath: string): Promise<ProcessResult> {
    console.log(`Processing project: ${project.name}`);

    // Skip if no repository URL
    if (!project.repoUrl) {
      console.log(`Skipping project ${project.name}: No repository URL found`);
@ -252,37 +242,36 @@ export class ProcessorService {
      console.log(`Cloning project repository: ${project.repoUrl}`);
      const projectRepoPath = await this.repositoryService.cloneProjectRepository(project, credentials);

      // Create a GeminiProjectProcessor to handle the project
      const geminiProjectProcessor = new GeminiProjectProcessor(
        project,
        projectRepoPath,
        mainRepoPath
      );

      // Let Gemini operate within the project
      console.log(`Letting Gemini operate within project: ${project.name}`);
      const result = await geminiProjectProcessor.processProject();

      // If no workitems were processed or there was an error, return early
      if (result.processedWorkitems.length === 0 || result.error) {
        console.log(`No workitems processed for project ${project.name}`);
        return result;
      }

      // Skip creating commits/PRs if dry run is enabled
      if (DRY_RUN_SKIP_COMMITS) {
        console.log(`[DRY RUN] Skipping commit and PR creation for project ${project.name}`);
        return {
          ...result,
          pullRequestUrl: 'https://example.com/mock-pr-url (DRY RUN)'
        };
      }

      // Create a new branch for changes
      const branchName = `update-workitems-${new Date().toISOString().split('T')[0]}`;
      await this.repositoryService.createBranch(projectRepoPath, branchName);

      // Commit changes
      await this.repositoryService.commitChanges(
        projectRepoPath,
@ -296,15 +285,15 @@ export class ProcessorService {
      const pullRequestUrl = await this.pullRequestService.createPullRequest(
        project,
        branchName,
        result.processedWorkitems,
        credentials,
        result.gitPatch
      );

      console.log(`Created pull request: ${pullRequestUrl}`);

      return {
        ...result,
        pullRequestUrl
      };
    } catch (error) {
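`GeminiProjectProcessor` itself is added in `gemini-project-processor.ts`, which is not part of this diff. Inferring only from how it is constructed and called above, the surface the processor relies on is roughly the following sketch (return types are assumptions):

```typescript
import { ProcessResult } from './types';

// Inferred shape only; the actual class in gemini-project-processor.ts may differ.
// It is constructed as: new GeminiProjectProcessor(project, projectRepoPath, mainRepoPath)
interface GeminiProjectProcessorLike {
  processProject(): Promise<ProcessResult>;
  getCurrentWorkitem(): { name: string } | null;
  getFileContent(filePath: string): string;
  writeFileContent(filePath: string, content: string, workitemName?: string): void;
  fileExists(filePath: string): boolean;
  listFiles(dirPath: string): string[];
  grepFiles(searchString: string, filePattern?: string): string[];
  deleteFile(filePath: string): string;
}
```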
@ -230,4 +230,60 @@ export class ProjectService {
    const updatedWorkitem = { ...workitem, pullRequestUrl };
    return updatedWorkitem;
  }

  /**
   * Update workitem file with implementation log
   * @param workitem Workitem to update
   * @param status Status of the workitem (created, updated, deleted)
   * @param files Array of files that were created, updated, or deleted
   * @returns Updated workitem
   */
  async updateWorkitemWithImplementationLog(
    workitem: Workitem,
    status: 'created' | 'updated' | 'deleted',
    files: string[]
  ): Promise<Workitem> {
    if (!fs.existsSync(workitem.path)) {
      throw new Error(`Workitem file not found: ${workitem.path}`);
    }

    // Read the current content
    let content = fs.readFileSync(workitem.path, 'utf-8');
    const lines = content.split('\n');

    // Format the log message
    const timestamp = new Date().toISOString();
    let logMessage = `\n\n<!-- Implementation Log: ${timestamp} -->\n`;

    switch (status) {
      case 'created':
        logMessage += `<!-- Workitem has been implemented. Created files: -->\n`;
        break;
      case 'updated':
        logMessage += `<!-- Workitem has been updated. Modified files: -->\n`;
        break;
      case 'deleted':
        logMessage += `<!-- Workitem has been deleted. Removed files: -->\n`;
        break;
    }

    // Add the list of files
    if (files.length > 0) {
      for (const file of files) {
        logMessage += `<!-- - ${file} -->\n`;
      }
    } else {
      logMessage += `<!-- No files were affected. -->\n`;
    }

    // Append the log to the end of the file
    lines.push(logMessage);

    // Write the updated content back to the file
    const updatedContent = lines.join('\n');
    fs.writeFileSync(workitem.path, updatedContent, 'utf-8');

    // Update the workitem object (no need to change any properties)
    return workitem;
  }
}
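A brief usage sketch of the new log helper; the spec file path below is made up for illustration, and the import locations are assumed, but the appended HTML-comment block matches what the method writes:

```typescript
import { ProjectService } from './project-service';
import { Workitem } from './types';

// Hypothetical helper: record that a workitem produced one new spec file.
async function logCreatedSpec(projectService: ProjectService, workitem: Workitem): Promise<Workitem> {
  return projectService.updateWorkitemWithImplementationLog(workitem, 'created', [
    'test/spec/example-feature.spec.ts',
  ]);
}

// The workitem markdown file then ends with a block like:
// <!-- Implementation Log: 2024-01-01T00:00:00.000Z -->
// <!-- Workitem has been implemented. Created files: -->
// <!-- - test/spec/example-feature.spec.ts -->
```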
@ -23,8 +23,9 @@ export class PullRequestService {
  async createPullRequest(
    project: Project,
    branchName: string,
    processedWorkitems: { workitem: Workitem; success: boolean; error?: string; status?: 'skipped' | 'updated' | 'created'; filesWritten?: string[] }[],
    credentials: RepoCredentials,
    gitPatch?: string
  ): Promise<string> {
    if (!project.repoHost || !project.repoUrl) {
      throw new Error(`Repository information not found for project ${project.name}`);
@ -32,7 +33,7 @@ export class PullRequestService {

    // Generate PR title and description
    const title = `Update workitems: ${new Date().toISOString().split('T')[0]}`;
    const description = await this.generatePullRequestDescription(processedWorkitems, gitPatch);

    // Determine the repository host type and create PR accordingly
    if (project.repoHost.includes('github.com')) {
@ -150,12 +151,21 @@ export class PullRequestService {
  /**
   * Generate a description for the pull request using Gemini
   * @param processedWorkitems List of processed workitems
   * @param gitPatch Optional git patch to include in the description
   * @returns Pull request description
   */
  private async generatePullRequestDescription(
    processedWorkitems: { workitem: Workitem; success: boolean; error?: string; status?: 'skipped' | 'updated' | 'created'; filesWritten?: string[] }[],
    gitPatch?: string
  ): Promise<string> {
    // Use Gemini to generate the pull request description
    const description = await this.geminiService.generatePullRequestDescription(processedWorkitems);

    // If there's a git patch, append it to the description
    if (gitPatch && gitPatch !== "No changes detected.") {
      return `${description}\n\n## Git Patch\n\`\`\`diff\n${gitPatch}\n\`\`\``;
    }

    return description;
  }
}
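A sketch of how the processor ends up calling the extended method once Gemini has finished; all concrete values are illustrative and the type import locations are assumed:

```typescript
import { PullRequestService } from './pull-request-service';
import { Project, RepoCredentials, Workitem } from './types';

// Illustrative call: status and filesWritten come from Gemini's output,
// gitPatch from RepositoryService.generateGitPatch().
async function openPullRequest(
  pullRequestService: PullRequestService,
  project: Project,
  workitem: Workitem,
  credentials: RepoCredentials,
  gitPatch: string
): Promise<string> {
  return pullRequestService.createPullRequest(
    project,
    'update-workitems-2024-01-01',
    [{ workitem, success: true, status: 'created', filesWritten: ['test/spec/example-feature.spec.ts'] }],
    credentials,
    gitPatch
  );
}
```

When the patch is present and not "No changes detected.", the generated description gains a trailing "## Git Patch" section containing the diff.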
@ -105,6 +105,33 @@ export class RepositoryService {
    await git.push('origin', branchName, ['--set-upstream']);
  }

  /**
   * Generate a git patch of the changes in a repository
   * @param repoDir Path to the repository
   * @returns Git patch as a string
   */
  async generateGitPatch(repoDir: string): Promise<string> {
    const git = simpleGit(repoDir);

    // Check if there are any changes
    const status = await git.status();
    if (status.files.length === 0) {
      return "No changes detected.";
    }

    // Generate a diff of all changes (staged and unstaged)
    const diff = await git.diff(['--staged', '--no-color']);
    const untrackedDiff = await git.diff(['--no-index', '/dev/null', ...status.not_added.map(file => path.join(repoDir, file))]).catch(() => '');

    // Combine the diffs
    let patch = diff;
    if (untrackedDiff) {
      patch += '\n\n' + untrackedDiff;
    }

    return patch || "No changes detected.";
  }

  /**
   * Configure git with credentials
   * @param repoDir Path to the repository
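A small usage sketch for the new helper; the service instance and repository directory are assumed to come from the existing cloning flow:

```typescript
import { RepositoryService } from './repository-service';

// Sketch: report how large the pending patch is for an already-cloned repo.
async function logPatchSize(repositoryService: RepositoryService, repoDir: string): Promise<void> {
  const patch = await repositoryService.generateGitPatch(repoDir);
  if (patch !== 'No changes detected.') {
    console.log(`Patch for ${repoDir} is ${patch.length} characters long`);
  }
}
```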
@ -34,9 +34,12 @@ export interface ProcessResult {
    workitem: Workitem;
    success: boolean;
    error?: string;
    status?: 'skipped' | 'updated' | 'created';
    filesWritten?: string[];
  }[];
  pullRequestUrl?: string;
  error?: string;
  gitPatch?: string;
}

/**
@ -62,5 +65,8 @@ export interface ProjectSummary {
  workitemsProcessed: number;
  workitemsSkipped: number;
  workitemsUpdated: number;
  workitemsCreated: number;
  filesWritten: number;
  pullRequestUrl?: string;
  gitPatch?: string;
}
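The new summary fields mirror the per-workitem data on `ProcessResult`; a sketch of how they could be aggregated (only the fields visible in this hunk are filled, so the result is typed as a partial; the helper itself is not part of this commit):

```typescript
import { ProcessResult, ProjectSummary } from './types';

// Illustrative aggregation of the fields added in this commit.
function summarizeResult(result: ProcessResult): Partial<ProjectSummary> {
  const items = result.processedWorkitems;
  return {
    workitemsProcessed: items.length,
    workitemsSkipped: items.filter(item => item.status === 'skipped').length,
    workitemsUpdated: items.filter(item => item.status === 'updated').length,
    workitemsCreated: items.filter(item => item.status === 'created').length,
    filesWritten: items.reduce((total, item) => total + (item.filesWritten?.length ?? 0), 0),
    pullRequestUrl: result.pullRequestUrl,
    gitPatch: result.gitPatch
  };
}
```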
@ -39,6 +39,10 @@ A work item prompt file follows the following format:
- [ ] Implementation: <reference of the implementation within the project repo, optionally with a link>
- [ ] Active

### Log

<log to be filled as the workitem is processed>
```

The active checkbox is optional and should be checked if the workitem is active. Inactive workitems should be ignored.
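For reference, a filled-in workitem file could look like the example below once it has been processed. The title and description lines are assumptions about the parts of the format not shown in this hunk; the log entries follow the HTML-comment format written by `ProjectService.updateWorkitemWithImplementationLog`:

```
## Add pagination to the search endpoint

Short description of the desired behaviour goes here.

- [ ] Implementation: test/spec/search-pagination.spec.ts
- [x] Active

### Log

<!-- Implementation Log: 2024-01-01T00:00:00.000Z -->
<!-- Workitem has been implemented. Created files: -->
<!-- - test/spec/search-pagination.spec.ts -->
```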