WIP prompt engineering

cghislai 2025-06-08 19:47:37 +02:00
parent 0bb5b9f876
commit fde6cf74a6
20 changed files with 180 additions and 311 deletions

package-lock.json (generated)

@@ -1,126 +0,0 @@
{
"name": "test-ai-code-agents",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"devDependencies": {
"@types/express": "^5.0.3"
}
},
"node_modules/@types/body-parser": {
"version": "1.19.6",
"resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz",
"integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/connect": "*",
"@types/node": "*"
}
},
"node_modules/@types/connect": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
"integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/express": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.3.tgz",
"integrity": "sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/body-parser": "*",
"@types/express-serve-static-core": "^5.0.0",
"@types/serve-static": "*"
}
},
"node_modules/@types/express-serve-static-core": {
"version": "5.0.6",
"resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz",
"integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
"@types/qs": "*",
"@types/range-parser": "*",
"@types/send": "*"
}
},
"node_modules/@types/http-errors": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz",
"integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/mime": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
"integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/node": {
"version": "22.15.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.30.tgz",
"integrity": "sha512-6Q7lr06bEHdlfplU6YRbgG1SFBdlsfNC4/lX+SkhiTs0cpJkOElmWls8PxDFv4yY/xKb8Y6SO0OmSX4wgqTZbA==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/@types/qs": {
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz",
"integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/range-parser": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz",
"integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/send": {
"version": "0.17.5",
"resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz",
"integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/mime": "^1",
"@types/node": "*"
}
},
"node_modules/@types/serve-static": {
"version": "1.15.8",
"resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz",
"integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/http-errors": "*",
"@types/node": "*",
"@types/send": "*"
}
},
"node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
}
}
}

@@ -1,5 +0,0 @@
{
"devDependencies": {
"@types/express": "^5.0.3"
}
}

@@ -23,6 +23,10 @@ export class ProjectService {
         return this.sharedProjectService.findProjects(promptsDir, 'prompts-to-test-spec');
     }
 
+    async collectRelevantFiles(project: Project, projectRepoPath: string): Promise<Record<string, string>> {
+        return this.sharedProjectService.collectRelevantFiles(project, projectRepoPath);
+    }
+
     /**
      * Find all workitems in a project
      * @param projectPath Path to the project directory

@@ -49,10 +49,13 @@ export class ProjectWorkitemsService {
         // Read project guidelines
         const projectGuidelines = await this.projectService.readProjectGuidelines(project.path);
 
+        // Collect all relevant files from the project directory
+        const relevantFiles = await this.projectService.collectRelevantFiles(project, projectRepoPath);
+
         // Process each workitem
         const processedWorkitems: ProcessedWorkItem[] = [];
         for (const workitem of workitems) {
-            const result: ProcessedWorkItem = await this.processWorkitem(project, projectRepoPath, workitem, projectGuidelines);
+            const result: ProcessedWorkItem = await this.processWorkitem(project, projectRepoPath, workitem, projectGuidelines, relevantFiles);
             processedWorkitems.push(result);
         }
 
@@ -96,7 +99,8 @@ export class ProjectWorkitemsService {
         project: Project,
         projectRepoPath: string,
         workitem: Workitem,
-        projectGuidelines: string
+        projectGuidelines: string,
+        relevantFiles: Record<string, string>
     ): Promise<ProcessedWorkItem> {
         try {
             // Set the current workitem
@@ -105,9 +109,6 @@ export class ProjectWorkitemsService {
             // Read workitem content
             const workitemContent = fs.readFileSync(workitem.path, 'utf-8');
 
-            // Collect all relevant files from the project directory
-            const relevantFiles = await this.collectRelevantFiles(project, projectRepoPath, workitem);
-
             // Let Gemini decide what to do with the workitem
             const result = await this.generateFeatureFile(
                 projectRepoPath,
@@ -150,36 +151,6 @@ export class ProjectWorkitemsService {
         }
     }
 
-    /**
-     * Collect relevant files from the project directory
-     * @param project The project info
-     * @param workitem The workitem being processed (for logging purposes)
-     * @returns Object containing file contents
-     */
-    private async collectRelevantFiles(project: Project, projectRepoPath: string, workitem: Workitem): Promise<Record<string, string>> {
-        const relevantFiles: Record<string, string> = {};
-        try {
-            const guidelinePaths = project.aiGuidelines?.split(',') ?? [
-                'INFO.md', 'README.md', 'GUIDELINES.md', 'ARCHITECTURE.md', 'IMPLEMENTATION.md'
-            ];
-            guidelinePaths
-                .map(g => g.trim())
-                .forEach(fileName => {
-                    console.debug("Collected guideline file: " + fileName);
-                    const filePath = path.join(projectRepoPath, fileName);
-                    if (fs.existsSync(filePath)) {
-                        relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
-                    }
-                });
-            console.log(`ProjectWorkitemsService: Collected ${Object.keys(relevantFiles).length} guideline files for workitem ${workitem.name}`);
-        } catch (error) {
-            console.error(`Error collecting relevant files for workitem ${workitem.name}:`, error);
-        }
-        return relevantFiles;
-    }
-
     /**
      * Generate feature file content using Gemini API

@@ -9,7 +9,7 @@ import {
     FunctionDeclarationSchemaType,
     FunctionResponse,
     GenerateContentRequest,
-    GenerativeModel,
+    GenerativeModel, GenerativeModelPreview,
     Tool,
     VertexAI
 } from '@google-cloud/vertexai';
@@ -37,6 +37,7 @@ export interface GeminiResponse {
         reason: string;
     }[];
     modelResponses: string[];
+    modelSummary?: string;
     inputCost?: number;
     outputCost?: number;
     totalCost?: number;
@@ -69,6 +70,7 @@ export class GeminiFileSystemService {
         this.vertexAI = new VertexAI({
             project: this.projectId,
             location: this.location,
+            apiEndpoint: 'aiplatform.googleapis.com'
         });
 
         // Define file operation functions
@@ -448,12 +450,13 @@ Create a new work list is additional scanning / editing is required.
 `Complete the session:
 Once you have completed all steps, call reportStepOutcome with outcome 'end'`,
         ];
-        const promptContents: Content[] = prompts.map(promptPart => {
-            return {role: 'user', parts: [{text: promptPart}]}
-        })
+        const promptContents: Content[] = [{
+            role: 'user',
+            parts: prompts.map(promptPart => ({text: promptPart}))
+        }];
 
         // Instantiate the model with our file operation tools
-        const generativeModel = this.vertexAI.getGenerativeModel({
+        const generativeModel = this.vertexAI.preview.getGenerativeModel({
             model: this.model,
             tools: this.fileOperationTools,
             generation_config: {
@@ -581,7 +584,7 @@ Once you have completed all steps, call reportStepOutcome with outcome 'end'`,
         }
     }
 
-    private async handleGeminiStream(generativeModel: GenerativeModel, request: GenerateContentRequest,
+    private async handleGeminiStream(generativeModel: GenerativeModel | GenerativeModelPreview, request: GenerateContentRequest,
                                      rootPath: string,
                                      geminiResponse: GeminiResponse = {
                                          stepOutcomes: [],
@@ -625,9 +628,9 @@
                 if (part.functionCall) {
                     const functionCall = part.functionCall;
                     pendingFunctionCalls.push(functionCall);
-                } else if (part.text) {
+                } else if (part.text != null) {
                     const textContent = part.text;
-                    geminiResponse.modelResponses.push(textContent);
+                    textContent && geminiResponse.modelResponses.push(textContent);
                 } else {
                     console.warn(`Unhandled response part: ${JSON.stringify(part)}`);
                 }
@@ -658,7 +661,8 @@
                 const updatedContent = this.createReevaluationContrent();
                 updatedRequestContents.push(...updatedContent);
             } else if (outcome === 'end-confirmed') {
-                console.log('End confirmed');
+                console.log('End confirmed: ' + reason);
+                geminiResponse.modelSummary = reason;
                 endReceived = true;
             } else {
                 geminiResponse.stepOutcomes.push({
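Aside: the hunk above switches from one user `Content` per prompt fragment to a single user `Content` that carries all fragments as parts, and routes the call through the preview client. A minimal sketch of the two shapes, assuming only the `Content` type from `@google-cloud/vertexai` (the prompt strings here are illustrative, not the service's real prompts):

```typescript
import {Content} from '@google-cloud/vertexai';

// Illustrative prompt fragments; the real ones are assembled by GeminiFileSystemService.
const prompts: string[] = [
    'You are working on the checked-out repository.',
    'Follow the project guidelines.',
    'Once you have completed all steps, call reportStepOutcome with outcome end.',
];

// Before: one user Content per fragment.
const perFragment: Content[] = prompts.map(promptPart => ({
    role: 'user',
    parts: [{text: promptPart}],
}));

// After (this commit): a single user Content carrying all fragments as parts.
const singleContent: Content[] = [{
    role: 'user',
    parts: prompts.map(promptPart => ({text: promptPart})),
}];

console.log(perFragment.length, singleContent[0].parts.length); // 3, 3
```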

@@ -95,6 +95,7 @@ You are tasked with creating a pull request description for changes to test spec
 The following is a summary of the changes made:
 ${description}
 
 ${gitPatch && gitPatch !== "No changes detected." ? gitPatchSection : ''}
+
 Create a clear, professional pull request description that:

@@ -14,7 +14,7 @@ export class ProjectService {
      * @param functionName Name of the function to find projects for
      * @returns Array of projects
      */
-    findProjects(promptsDir: string, functionName: string): Project[] {
+    async findProjects(promptsDir: string, functionName: string): Promise<Project[]> {
         const projects: Project[] = [];
 
         // Check if prompts directory exists
@@ -43,7 +43,7 @@
             }
 
             // Read project info
-            const project = this.readProjectInfo(projectPath, dir.name);
+            const project = await this.readProjectInfo(projectPath, dir.name);
             projects.push(project);
         }
@@ -68,7 +68,7 @@
      * - [x] Jira component: project-component
      * ```
      */
-    readProjectInfo(projectPath: string, projectName: string): Project {
+    async readProjectInfo(projectPath: string, projectName: string): Promise<Project> {
         const infoPath = path.join(projectPath, 'INFO.md');
 
         if (!fs.existsSync(infoPath)) {
@@ -88,20 +88,57 @@
         const targetBranchMatch = infoContent.match(/- \[[x]\] Target branch: (.*)/);
         const jiraComponentMatch = infoContent.match(/- \[[x]\] Jira component: (.*)/);
         const aiGuidelinesMatch = infoContent.match(/- \[[x]\] AI guidelines: (.*)/);
+        const remoteDataMatch = infoContent.match(/- \[[x]\] Remote data: (.*)/);
 
-        const project = {
+        const remoteUris = remoteDataMatch ? remoteDataMatch[1].trim().split(',') : [];
+
+        const project: Project = {
             name: projectName,
             path: projectPath,
             repoHost: repoHostMatch ? repoHostMatch[1].trim() : undefined,
             repoUrl: repoUrlMatch ? repoUrlMatch[1].trim() : undefined,
             targetBranch: targetBranchMatch ? targetBranchMatch[1].trim() : undefined,
             jiraComponent: jiraComponentMatch ? jiraComponentMatch[1].trim() : undefined,
-            aiGuidelines: aiGuidelinesMatch ? aiGuidelinesMatch[1].trim() : undefined
+            aiGuidelines: aiGuidelinesMatch ? aiGuidelinesMatch[1].trim().split(',') : undefined,
+            remoteDataUris: remoteUris,
         };
 
         return project;
     }
 
+    async collectRelevantFiles(project: Project, projectRepoPath: string): Promise<Record<string, string>> {
+        const relevantFiles: Record<string, string> = {};
+        try {
+            const guidelinePaths = project.aiGuidelines ?? [
+                'INFO.md', 'README.md', 'GUIDELINES.md', 'ARCHITECTURE.md', 'IMPLEMENTATION.md'
+            ];
+            guidelinePaths
+                .map(g => g.trim())
+                .forEach(fileName => {
+                    console.debug("Collected guideline file: " + fileName);
+                    const filePath = path.join(projectRepoPath, fileName);
+                    if (fs.existsSync(filePath)) {
+                        relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
+                    }
+                });
+
+            const remoteUris = project.remoteDataUris ?? [];
+            for (const uri of remoteUris) {
+                const data = await this.fetchRemoteData(uri);
+                relevantFiles[uri] = data;
+                console.debug("Collected remote data: " + uri);
+            }
+
+            console.log(`Collected ${Object.keys(relevantFiles).length} additional files for project ${project.name}`);
+        } catch (error) {
+            console.error(`Error collecting additional files for project ${project.name}:`, error);
        }
+        return relevantFiles;
+    }
+
     /**
      * Read AI guidelines for a project
      * @param projectPath Path to the project directory
@@ -116,4 +153,14 @@
         return fs.readFileSync(aiPath, 'utf-8');
     }
 
+    private async fetchRemoteData(uri: string): Promise<string> {
+        try {
+            const response = await fetch(uri);
+            return await response.text();
+        } catch (e) {
+            console.error(e);
+            throw new Error(`Failed to fetch remote data from ${uri}`);
+        }
+    }
 }
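As a quick illustration of the new `Remote data:` support above: `readProjectInfo` splits the comma-separated value into `remoteDataUris`, and `collectRelevantFiles` later fetches each URI via `fetchRemoteData`. A standalone sketch of the parsing step, mirroring the regexes in the hunk (the sample content, path, and URLs are made up):

```typescript
// Sample INFO.md content; values are placeholders.
const infoContent = [
    '- [x] AI guidelines: docs/AI_GUIDELINES.md',
    '- [x] Remote data: https://example.invalid/openapi.json,https://example.invalid/schema.json',
].join('\n');

const aiGuidelinesMatch = infoContent.match(/- \[[x]\] AI guidelines: (.*)/);
const remoteDataMatch = infoContent.match(/- \[[x]\] Remote data: (.*)/);

// Same parsing as readProjectInfo: both fields are comma-separated lists.
const aiGuidelines = aiGuidelinesMatch ? aiGuidelinesMatch[1].trim().split(',') : undefined;
const remoteDataUris = remoteDataMatch ? remoteDataMatch[1].trim().split(',') : [];

console.log(aiGuidelines);   // ['docs/AI_GUIDELINES.md']
console.log(remoteDataUris); // ['https://example.invalid/openapi.json', 'https://example.invalid/schema.json']
```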

@@ -9,7 +9,8 @@ export interface Project {
     repoUrl?: string;
     jiraComponent?: string;
     targetBranch?: string;
-    aiGuidelines?: string;
+    aiGuidelines?: string[];
+    remoteDataUris?: string[];
 }
 
 export interface RepoCredentials {

@@ -22,14 +22,19 @@ describe('formatHttpResponse', () => {
                 project: {name: 'project1', path: '/path/to/project1'},
                 success: true,
                 filesWritten: ['file1.ts', 'file2.ts'],
-                filesRemoved: ['file3.ts'],
+                filesDeleted: ['file3.ts'],
+                stepOutcomes: [],
+                modelResponses: [],
                 pullRequestUrl: 'https://github.com/org/repo/pull/1'
             },
             {
                 project: {name: 'project2', path: '/path/to/project2'},
                 success: true,
                 filesWritten: ['file4.ts'],
-                filesRemoved: [],
+                filesDeleted: [],
+                stepOutcomes: [],
+                modelResponses: [],
                 pullRequestUrl: 'https://github.com/org/repo/pull/2'
             }
         ];
@@ -63,13 +68,19 @@
                 project: {name: 'project1', path: '/path/to/project1'},
                 success: true,
                 filesWritten: ['file1.ts'],
-                filesRemoved: [],
+                stepOutcomes: [],
+                modelResponses: [],
+                filesDeleted: [],
                 pullRequestUrl: 'https://github.com/org/repo/pull/1'
             },
             {
                 project: {name: 'project2', path: '/path/to/project2'},
                 success: false,
-                error: 'Something went wrong'
+                error: 'Something went wrong',
+                stepOutcomes: [],
+                modelResponses: [],
+                filesDeleted: [],
+                filesWritten: []
             }
         ];
@@ -115,7 +126,11 @@
         const results: ProcessResult[] = [
             {
                 project: {name: 'project1', path: '/path/to/project1'},
-                success: true
+                success: true,
+                stepOutcomes: [],
+                modelResponses: [],
+                filesDeleted: [],
+                filesWritten: []
             }
         ];
@@ -182,7 +197,11 @@ describe('HTTP endpoint handler', () => {
                 project: {name: 'project1', path: '/path/to/project1'},
                 success: true,
                 filesWritten: ['file1.ts'],
-                pullRequestUrl: 'https://github.com/org/repo/pull/1'
+                pullRequestUrl: 'https://github.com/org/repo/pull/1',
+                stepOutcomes: [],
+                modelResponses: [],
+                filesDeleted: [],
             }
         ];

@@ -135,7 +135,9 @@ describe('ProcessorService', () => {
             project: mockProjects[0],
             success: true,
             filesWritten: ['file1.ts'],
-            filesRemoved: [],
+            filesDeleted: [],
+            stepOutcomes: [],
+            modelResponses: [],
             gitPatch: 'mock git patch 1'
         };
@@ -143,7 +145,9 @@
             project: mockProjects[1],
             success: true,
             filesWritten: ['file2.ts'],
-            filesRemoved: [],
+            filesDeleted: [],
+            stepOutcomes: [],
+            modelResponses: [],
             gitPatch: 'mock git patch 2'
         };
@@ -177,7 +181,9 @@
             project: mockProjects[0],
             success: true,
             filesWritten: ['file1.ts'],
-            filesRemoved: [],
+            filesDeleted: [],
+            stepOutcomes: [],
+            modelResponses: [],
             gitPatch: 'mock git patch 1'
         };
@@ -249,7 +255,9 @@
             project: mockProject,
             success: true,
             filesWritten: ['file1.ts'],
-            filesRemoved: [],
+            filesDeleted: [],
+            stepOutcomes: [],
+            modelResponses: [],
             gitPatch: 'mock git patch'
         };

@@ -132,7 +132,7 @@ describe('ProjectTestSpecsService', () => {
             expect(mockProjectService.readProjectGuidelines).toHaveBeenCalledWith(project.path);
             expect(result.success).toBe(true);
             expect(result.filesWritten).toEqual(mockImplementationResult.filesWritten);
-            expect(result.filesRemoved).toEqual(mockImplementationResult.filesDeleted);
+            expect(result.filesDeleted).toEqual(mockImplementationResult.filesDeleted);
             expect(result.gitPatch).toBe('mock git patch');
         });
@@ -179,54 +179,6 @@
         });
     });
 
-    describe('collectRelevantFiles', () => {
-        test('should collect relevant files from project directory', async () => {
-            // Arrange
-            const project = {name: 'project1', path: '/path/to/project1'};
-            const projectRepoPath = '/path/to/project/repo';
-
-            // Mock fs.existsSync to return true for specific files
-            (fs.existsSync as jest.Mock).mockImplementation((filePath) => {
-                if (filePath.includes('nitro-it/src/test/java/be/fiscalteam/nitro/bdd')) return true;
-                return filePath.includes('INFO.md') || filePath.includes('README.md');
-            });
-
-            // Mock fs.readFileSync to return file content
-            (fs.readFileSync as jest.Mock).mockImplementation((filePath) => {
-                if (filePath.includes('INFO.md')) return 'INFO.md content';
-                if (filePath.includes('README.md')) return 'README.md content';
-                return '';
-            });
-
-            // Act
-            const result = await (projectTestSpecsService as any).collectRelevantFiles(project, projectRepoPath);
-
-            // Assert
-            expect(Object.keys(result)).toContain('INFO.md');
-            expect(Object.keys(result)).toContain('README.md');
-            expect(result['INFO.md']).toBe('INFO.md content');
-            expect(result['README.md']).toBe('README.md content');
-        });
-
-        test('should handle errors when collecting relevant files', async () => {
-            // Arrange
-            const project = {name: 'project1', path: '/path/to/project1'};
-            const projectRepoPath = '/path/to/project/repo';
-
-            // Mock fs.existsSync to throw an error
-            (fs.existsSync as jest.Mock).mockImplementation(() => {
-                throw new Error('File system error');
-            });
-
-            // Act
-            const result = await (projectTestSpecsService as any).collectRelevantFiles(project, projectRepoPath);
-
-            // Assert
-            expect(result).toEqual({});
-            expect(consoleErrorSpy).toHaveBeenCalled();
-        });
-    });
-
     describe('generateImplementation', () => {
         test('should generate implementation using Gemini', async () => {
             // Arrange

@@ -31,7 +31,7 @@ export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
             success: result.success ?? false,
             error: result.error,
             filesWritten: result.filesWritten?.length ?? 0,
-            filesRemoved: result.filesRemoved?.length ?? 0,
+            filesRemoved: result.filesDeleted?.length ?? 0,
             pullRequestUrl: result.pullRequestUrl,
         };
     });

@@ -151,7 +151,11 @@ export class ProcessorService {
                 results.push({
                     project,
                     success: false,
-                    error: error instanceof Error ? error.message : String(error)
+                    error: error instanceof Error ? error.message : String(error),
+                    stepOutcomes: [],
+                    filesWritten: [],
+                    filesDeleted: [],
+                    modelResponses: []
                 });
             }
         }
@@ -179,7 +183,12 @@
             return {
                 project,
                 success: false,
-                error: "No repository URL found"
+                error: "No repository URL found",
+                stepOutcomes: [],
+                filesWritten: [],
+                filesDeleted: [],
+                modelResponses: []
             };
         }
@@ -234,11 +243,16 @@
             // Generate PR description using Gemini
             const modelResponses = result.modelResponses ?? [];
             const lastModelResponse = modelResponses.slice(Math.max(modelResponses.length - 10, 0), modelResponses.length);
+            const firstModelResponse = modelResponses.slice(0, Math.min(modelResponses.length, 10));
             const changeDescription = `
-feature spec implementation.
-${result.totalCost} tokens consumed to write ${result.filesWritten?.length ?? 0} files
-last model responses:
+Test feature spec implemented.
+Model summary: ${result.modelSummary}
+First model responses:
+${firstModelResponse.join('\n')}
+Last model responses:
 ${lastModelResponse.join('\n')}
 `;
@@ -256,7 +270,9 @@
                 branchName,
                 credentials,
                 title,
-                prDescription
+                `${prDescription}
+
+${result.totalCost} tokens consumed to write ${result.filesWritten?.length ?? 0} files`
             );
 
             console.log(`Created pull request: ${pullRequestUrl}`);
@@ -271,7 +287,11 @@
             return {
                 project,
                 success: false,
-                error: error instanceof Error ? error.message : String(error)
+                error: error instanceof Error ? error.message : String(error),
+                modelResponses: [],
+                filesDeleted: [],
+                filesWritten: [],
+                stepOutcomes: []
             };
         }
     }

@@ -37,4 +37,8 @@ export class ProjectService {
     async readProjectGuidelines(projectPath: string): Promise<string> {
         return this.sharedProjectService.readProjectGuidelines(projectPath);
     }
+
+    async collectRelevantFiles(project: Project, projectPath: string): Promise<Record<string, string>> {
+        return this.sharedProjectService.collectRelevantFiles(project, projectPath);
+    }
 }

@@ -1,14 +1,12 @@
 /**
  * Service for handling test spec operations within a project
  */
-import * as fs from 'fs';
 import * as path from 'path';
 import {ProcessResult} from '../types';
 import {ProjectService} from './project-service';
 import {DRY_RUN_SKIP_GEMINI} from '../config';
 import {GeminiFileSystemService, Project, RepositoryService as SharedRepositoryService,} from 'shared-functions';
 import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";
-import {success} from "concurrently/dist/src/defaults";
 
 export class ProjectTestSpecsService {
     private projectService: ProjectService;
@@ -39,7 +37,7 @@ export class ProjectTestSpecsService {
             // Generate git patch if any files were written
             let gitPatch: string | undefined = undefined;
-            if ((result.filesWritten?.length ?? 0) > 0 || (result.filesRemoved?.length ?? 0) > 0) {
+            if ((result.filesWritten?.length ?? 0) > 0 || (result.filesDeleted?.length ?? 0) > 0) {
                 try {
                     console.log(`Generating git patch for project ${project.name} with ${result.filesWritten?.length} files written`);
                     gitPatch = await this.sharedRepositoryService.generateGitPatch(projectRepoPath);
@@ -59,7 +57,11 @@
             return {
                 project: project,
                 success: false,
-                error: error instanceof Error ? error.message : String(error)
+                error: error instanceof Error ? error.message : String(error),
+                stepOutcomes: [],
+                filesWritten: [],
+                filesDeleted: [],
+                modelResponses: []
             };
         }
     }
@@ -78,7 +80,7 @@
     ): Promise<ProcessResult> {
         try {
             // Collect all relevant files from the project directory
-            const relevantFiles = await this.collectRelevantFiles(project, projectRepoPath);
+            const relevantFiles = await this.projectService.collectRelevantFiles(project, projectRepoPath);
 
             // Let Gemini generate the implementation
             const result = await this.generateAllTestSpecs(
@@ -91,9 +93,7 @@
             return {
                 project: project,
                 success: true,
-                filesWritten: result.filesWritten,
-                filesRemoved: result.filesDeleted,
-                totalCost: result.totalCost
+                ...result
             };
         } catch (error) {
             console.error(`Error processing project ${project.name}:`, error);
@@ -101,43 +101,14 @@
                 project: project,
                 success: false,
                 error: error instanceof Error ? error.message : String(error),
+                stepOutcomes: [],
+                filesWritten: [],
+                filesDeleted: [],
+                modelResponses: []
             };
         }
     }
 
-    /**
-     * Collect relevant files from the project directory
-     * @param project The project info
-     * @param projectRepoPath Path to the project repository
-     * @param testSpec The test spec being processed (for logging purposes)
-     * @returns Object containing file contents
-     */
-    private async collectRelevantFiles(project: Project, projectRepoPath: string): Promise<Record<string, string>> {
-        const relevantFiles: Record<string, string> = {};
-        try {
-            // Add project guidelines
-            const guidelinePaths = project.aiGuidelines?.split(',') ?? [
-                'INFO.md', 'README.md', 'GUIDELINES.md', 'ARCHITECTURE.md', 'IMPLEMENTATION.md'
-            ];
-            guidelinePaths
-                .map(g => g.trim())
-                .forEach(fileName => {
-                    console.debug("Collected guideline file: " + fileName);
-                    const filePath = path.join(projectRepoPath, fileName);
-                    if (fs.existsSync(filePath)) {
-                        relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
-                    }
-                });
-            console.log(`ProjectTestSpecsService: Collected ${Object.keys(relevantFiles).length} relevant files for ${project.name}`);
-        } catch (error) {
-            console.error(`Error collecting relevant files for ${project.name}:`, error);
-        }
-        return relevantFiles;
-    }
-
     /**
      * Generate implementation using Gemini API
      * @param projectRepoPath Path to the project repository

@@ -3,6 +3,7 @@
  */
 import {Project} from "shared-functions";
+import {GeminiResponse} from "shared-functions/dist/services/gemini-file-system-service";
 
 /**
  * Status of a test spec implementation
@@ -28,16 +29,12 @@ export interface RepoCredentials {
     token?: string;
 }
 
-export interface ProcessResult {
+export interface ProcessResult extends GeminiResponse {
     project: Project;
-    success?: boolean;
-    pullRequestUrl?: string;
+    success: boolean;
     error?: string;
     gitPatch?: string;
-    filesWritten?: string[];
-    filesRemoved?: string[];
-    totalCost?: number;
-    modelResponses?: string[];
+    pullRequestUrl?: string;
 }
 
 /**
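Because `ProcessResult` now extends `GeminiResponse`, every place that builds a result has to supply the response arrays, which is why the failure branches throughout this commit add empty `stepOutcomes`, `filesWritten`, `filesDeleted`, and `modelResponses`. A small helper sketch of that pattern, assuming the relative import path and that those array fields are required on `GeminiResponse`:

```typescript
import {Project} from 'shared-functions';
import {ProcessResult} from './types'; // path assumed; adjust to the actual module layout

// Mirrors the failure objects built in the catch blocks above.
function failureResult(project: Project, error: unknown): ProcessResult {
    return {
        project,
        success: false,
        error: error instanceof Error ? error.message : String(error),
        stepOutcomes: [],
        filesWritten: [],
        filesDeleted: [],
        modelResponses: [],
    };
}

// Usage sketch: results.push(failureResult(project, error));
```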

@@ -24,6 +24,7 @@ A project info file follows the following format:
 - [ ] Repo url: <url of the project repository>
 - [ ] Target branch: <target branch for the PR>
 - [ ] AI guidelines: <path to ai guidelines md file in the project repo>
+- [ ] Remote data: <url to remote data to include in prompt>
 - [ ] Jira component: <component of the jira>
 ```
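For reference, a filled-in INFO.md following the format above could look like this (all values are illustrative placeholders):

```
- [x] Repo url: https://git.example.com/org/my-project.git
- [x] Target branch: main
- [x] AI guidelines: docs/AI_GUIDELINES.md
- [x] Remote data: https://api.example.com/openapi.json
- [x] Jira component: my-component
```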

@@ -22,6 +22,7 @@ A project info file follows the following format:
 - [ ] Repo url: <url of the project repository>
 - [ ] Target branch: <target branch for the PR>
 - [ ] AI guidelines: <path to ai guidelines md file in the project repo>
+- [ ] Remote data: <url to remote data to include in prompt>
 - [ ] Jira component: <component of the jira>
 ```

@@ -6,10 +6,9 @@ Implement tests according to the cucumber ".feature" files.
 - All files and all their method must be correctly implemented, without any TODO or stub or placeholder.
 - The code produced must be ready for test driven development without any adaptation required.
 - The tests are business-driven integration tests: A real api must be accessed to ensure proper application
-  behavior.
-- Scan the existing api in nitro-domain-api/src/main/java to implement http requests to the api endpoints.
-- Use the following techniques to identify the relevant resources:
+  behavior. Don't use mocks. Don't use stubs. Don't use fakes. Don't let someone else write the implementation.
+- Use the following techniques to identify the relevant resources within the codebase:
     - search for patterns like 'class Ws*<resource-name-camel-case>*' to identify api models file names
     - search for patterns like 'interface Ws*<resource-name-camel-case>*Controller' to identify api controller file
       names
@@ -19,10 +18,9 @@
 - Get a complete understanding of the relevant resources, how they relate to each other, and the available operations.
 - Get a complete understanding of the various entities composing the business resources
 
-- Create missing global configuration in nitro-it/src/test/resources/application-bdd.properties
+- Create required configuration in nitro-it/src/test/resources/application-bdd.properties
 - create or update @ApplicationScoped services in nitro-it/src/test/java/be/fiscalteam/nitro/bdd/services/
-  to implement the test logic if needed
-- Those services must be fully implemented and make actual http requests to the api endpoints when called.
 
 For each feature file, create or update the implementation in nitro-it/src/test/java/be/fiscalteam/nitro/bdd/features/<
 feature-name>/

@@ -6,4 +6,5 @@ Nitro backend server in quarkus
 - [x] Repo url: https://gitea.fteamdev.valuya.be/cghislai/nitro-back.git
 - [x] Target branch: main
 - [x] AI guidelines: nitro-it/src/test/resources/workitems/AI_IMPLEMENTATION.md
+- [ ] Remote data: https://api.nitrodev.ebitda.tech/domain-ws/q/openapi
 - [x] Jira component: nitro