WIP

parent 6568ff640d
commit 21379adf96

1  .idea/test-ai-code-agents.iml  generated
@@ -4,6 +4,7 @@
     <exclude-output />
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/src/functions/shared/coverage" />
+      <excludeFolder url="file://$MODULE_DIR$/src/functions/test-spec-to-test-implementation/coverage" />
     </content>
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
@@ -1,3 +1,4 @@
 node_modules/
 dist/
 .env
+coverage/
26  src/functions/test-spec-to-test-implementation/.env.example  Normal file
@@ -0,0 +1,26 @@
# Main repository configuration
MAIN_REPO_URL=https://github.com/Ebitda-SRL/test-ai-code-agents.git
MAIN_REPO_TOKEN=your_token_here
# MAIN_REPO_USERNAME=your_username
# MAIN_REPO_PASSWORD=your_password

# GitHub credentials
GITHUB_TOKEN=your_github_token
# GITHUB_USERNAME=your_github_username
# GITHUB_PASSWORD=your_github_password

# Gitea credentials
GITEA_USERNAME=your_gitea_username
GITEA_PASSWORD=your_gitea_password

# Google Cloud configuration
GOOGLE_CLOUD_PROJECT_ID=your-project-id
GOOGLE_CLOUD_LOCATION=us-central1
GEMINI_MODEL=gemini-1.5-pro
GOOGLE_API_KEY=your_api_key

# Function configuration
DEBUG=false
USE_LOCAL_REPO=true
DRY_RUN_SKIP_GEMINI=true
DRY_RUN_SKIP_COMMITS=true
4  src/functions/test-spec-to-test-implementation/.gitignore  vendored  Normal file
@@ -0,0 +1,4 @@
node_modules/
dist/
.env
coverage/
@@ -0,0 +1,28 @@
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  roots: ['<rootDir>/src'],
  testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'],
  testPathIgnorePatterns: ['<rootDir>/src/__tests__/setup.ts'],
  transform: {
    '^.+\\.ts$': 'ts-jest',
  },
  moduleFileExtensions: ['ts', 'js', 'json', 'node'],
  collectCoverage: true,
  coverageDirectory: 'coverage',
  collectCoverageFrom: [
    'src/**/*.ts',
    '!src/**/*.d.ts',
    '!src/**/__tests__/**',
    '!src/**/__mocks__/**',
  ],
  coverageThreshold: {
    global: {
      branches: 70,
      functions: 70,
      lines: 70,
      statements: 70,
    },
  },
  setupFiles: ['<rootDir>/src/__tests__/setup.ts'],
};
6103  src/functions/test-spec-to-test-implementation/package-lock.json  generated  Normal file
File diff suppressed because it is too large
41  src/functions/test-spec-to-test-implementation/package.json  Normal file
@@ -0,0 +1,41 @@
{
  "name": "test-spec-to-test-implementation",
  "version": "1.0.0",
  "scripts": {
    "build": "tsc",
    "prestart": "npm run build",
    "deploy": "gcloud functions deploy testSpecToTestImplementationHttp --gen2 --runtime=nodejs20 --source=. --trigger-http --allow-unauthenticated",
    "deploy:event": "gcloud functions deploy testSpecToTestImplementationEvent --gen2 --runtime=nodejs20 --source=. --trigger-event=google.cloud.storage.object.v1.finalized --trigger-resource=YOUR_BUCKET_NAME",
    "clean": "rm -rf dist",
    "test": "jest",
    "test:watch": "jest --watch",
    "dev": "npm run build && functions-framework --target=testSpecToTestImplementationHttp --port=18081",
    "dev:watch": "concurrently \"tsc -w\" \"nodemon --watch dist/ --exec functions-framework --target=testSpecToTestImplementationHttp --port=18081\"",
    "dev:event": "npm run build && functions-framework --target=testSpecToTestImplementationEvent --signature-type=event"
  },
  "main": "dist/index.js",
  "dependencies": {
    "@google-cloud/functions-framework": "^3.0.0",
    "@google-cloud/vertexai": "^0.5.0",
    "axios": "^1.6.7",
    "dotenv": "^16.4.5",
    "shared-functions": "file:../shared",
    "simple-git": "^3.23.0"
  },
  "devDependencies": {
    "@types/express": "^5.0.3",
    "@types/jest": "^29.5.12",
    "@types/node": "^20.11.30",
    "concurrently": "^8.2.2",
    "jest": "^29.7.0",
    "nodemon": "^3.0.3",
    "ts-jest": "^29.1.2",
    "typescript": "^5.8.3"
  },
  "engines": {
    "node": ">=20"
  },
  "files": [
    "dist"
  ]
}
@@ -0,0 +1,284 @@
import { formatHttpResponse } from '../index';
import { ProcessResult, HttpResponse } from '../types';
import { ProcessorService } from '../services/processor-service';

// Mock the ProcessorService
jest.mock('../services/processor-service', () => {
  const mockProcessProjects = jest.fn();
  const mockProcessorInstance = {
    processProjects: mockProcessProjects
  };

  return {
    ProcessorService: jest.fn().mockImplementation(() => mockProcessorInstance)
  };
});

describe('formatHttpResponse', () => {
  test('should format successful results correctly', () => {
    // Arrange
    const results: ProcessResult[] = [
      {
        project: { name: 'project1', path: '/path/to/project1' },
        success: true,
        filesWritten: ['file1.ts', 'file2.ts'],
        filesRemoved: ['file3.ts'],
        pullRequestUrl: 'https://github.com/org/repo/pull/1'
      },
      {
        project: { name: 'project2', path: '/path/to/project2' },
        success: true,
        filesWritten: ['file4.ts'],
        filesRemoved: [],
        pullRequestUrl: 'https://github.com/org/repo/pull/2'
      }
    ];

    // Act
    const response: HttpResponse = formatHttpResponse(results);

    // Assert
    expect(response.success).toBe(true);
    expect(response.projectsProcessed).toBe(2);
    expect(response.projectsSucceeded).toBe(2);
    expect(response.projectsFailed).toBe(0);
    expect(response.mainPullRequestUrl).toBe('https://github.com/org/repo/pull/1');
    expect(response.projects).toHaveLength(2);
    expect(response.projects[0].name).toBe('project1');
    expect(response.projects[0].success).toBe(true);
    expect(response.projects[0].filesWritten).toBe(2);
    expect(response.projects[0].filesRemoved).toBe(1);
    expect(response.projects[0].pullRequestUrl).toBe('https://github.com/org/repo/pull/1');
    expect(response.projects[1].name).toBe('project2');
    expect(response.projects[1].success).toBe(true);
    expect(response.projects[1].filesWritten).toBe(1);
    expect(response.projects[1].filesRemoved).toBe(0);
    expect(response.projects[1].pullRequestUrl).toBe('https://github.com/org/repo/pull/2');
  });

  test('should format results with failures correctly', () => {
    // Arrange
    const results: ProcessResult[] = [
      {
        project: { name: 'project1', path: '/path/to/project1' },
        success: true,
        filesWritten: ['file1.ts'],
        filesRemoved: [],
        pullRequestUrl: 'https://github.com/org/repo/pull/1'
      },
      {
        project: { name: 'project2', path: '/path/to/project2' },
        success: false,
        error: 'Something went wrong'
      }
    ];

    // Act
    const response: HttpResponse = formatHttpResponse(results);

    // Assert
    expect(response.success).toBe(false);
    expect(response.projectsProcessed).toBe(2);
    expect(response.projectsSucceeded).toBe(1);
    expect(response.projectsFailed).toBe(1);
    expect(response.mainPullRequestUrl).toBe('https://github.com/org/repo/pull/1');
    expect(response.projects).toHaveLength(2);
    expect(response.projects[0].name).toBe('project1');
    expect(response.projects[0].success).toBe(true);
    expect(response.projects[0].filesWritten).toBe(1);
    expect(response.projects[0].filesRemoved).toBe(0);
    expect(response.projects[1].name).toBe('project2');
    expect(response.projects[1].success).toBe(false);
    expect(response.projects[1].error).toBe('Something went wrong');
    expect(response.projects[1].filesWritten).toBe(0);
    expect(response.projects[1].filesRemoved).toBe(0);
  });

  test('should handle empty results array', () => {
    // Arrange
    const results: ProcessResult[] = [];

    // Act
    const response: HttpResponse = formatHttpResponse(results);

    // Assert
    expect(response.success).toBe(true);
    expect(response.projectsProcessed).toBe(0);
    expect(response.projectsSucceeded).toBe(0);
    expect(response.projectsFailed).toBe(0);
    expect(response.mainPullRequestUrl).toBeUndefined();
    expect(response.projects).toHaveLength(0);
  });

  test('should handle undefined filesWritten and filesRemoved', () => {
    // Arrange
    const results: ProcessResult[] = [
      {
        project: { name: 'project1', path: '/path/to/project1' },
        success: true
      }
    ];

    // Act
    const response: HttpResponse = formatHttpResponse(results);

    // Assert
    expect(response.success).toBe(true);
    expect(response.projectsProcessed).toBe(1);
    expect(response.projectsSucceeded).toBe(1);
    expect(response.projectsFailed).toBe(0);
    expect(response.projects[0].filesWritten).toBe(0);
    expect(response.projects[0].filesRemoved).toBe(0);
  });
});

// Import the HTTP and CloudEvent handlers
import { http, cloudEvent } from '@google-cloud/functions-framework';

// Mock the functions-framework
jest.mock('@google-cloud/functions-framework', () => {
  return {
    http: jest.fn(),
    cloudEvent: jest.fn(),
    CloudEvent: jest.fn()
  };
});

describe('HTTP endpoint handler', () => {
  let httpHandler: Function;
  let mockReq: any;
  let mockRes: any;
  let mockProcessorInstance: any;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();

    // Capture the HTTP handler function when it's registered
    (http as jest.Mock).mockImplementation((name, handler) => {
      httpHandler = handler;
    });

    // Re-import the index to trigger the HTTP handler registration
    jest.isolateModules(() => {
      require('../index');
    });

    // Create mock request and response objects
    mockReq = {};
    mockRes = {
      status: jest.fn().mockReturnThis(),
      json: jest.fn()
    };

    // Get the mock ProcessorService instance
    mockProcessorInstance = new ProcessorService();
  });

  test('should return successful response when processing succeeds', async () => {
    // Arrange
    const mockResults: ProcessResult[] = [
      {
        project: { name: 'project1', path: '/path/to/project1' },
        success: true,
        filesWritten: ['file1.ts'],
        pullRequestUrl: 'https://github.com/org/repo/pull/1'
      }
    ];

    mockProcessorInstance.processProjects.mockResolvedValue(mockResults);

    // Act
    await httpHandler(mockReq, mockRes);

    // Assert
    expect(mockProcessorInstance.processProjects).toHaveBeenCalledTimes(1);
    expect(mockRes.status).toHaveBeenCalledWith(200);
    expect(mockRes.json).toHaveBeenCalledWith(expect.objectContaining({
      success: true,
      projectsProcessed: 1,
      projectsSucceeded: 1,
      projectsFailed: 0
    }));
  });

  test('should return error response when processing fails', async () => {
    // Arrange
    const mockError = new Error('Processing failed');
    mockProcessorInstance.processProjects.mockRejectedValue(mockError);

    // Act
    await httpHandler(mockReq, mockRes);

    // Assert
    expect(mockProcessorInstance.processProjects).toHaveBeenCalledTimes(1);
    expect(mockRes.status).toHaveBeenCalledWith(500);
    expect(mockRes.json).toHaveBeenCalledWith(expect.objectContaining({
      success: false,
      error: 'Processing failed'
    }));
  });
});

describe('Cloud Event handler', () => {
  let cloudEventHandler: Function;
  let mockEvent: any;
  let mockProcessorInstance: any;
  let consoleLogSpy: jest.SpyInstance;
  let consoleErrorSpy: jest.SpyInstance;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();

    // Spy on console methods
    consoleLogSpy = jest.spyOn(console, 'log').mockImplementation();
    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();

    // Capture the Cloud Event handler function when it's registered
    (cloudEvent as jest.Mock).mockImplementation((name, handler) => {
      cloudEventHandler = handler;
    });

    // Re-import the index to trigger the Cloud Event handler registration
    jest.isolateModules(() => {
      require('../index');
    });

    // Create mock event object
    mockEvent = { type: 'test-event' };

    // Get the mock ProcessorService instance
    mockProcessorInstance = new ProcessorService();
  });

  afterEach(() => {
    // Restore console methods
    consoleLogSpy.mockRestore();
    consoleErrorSpy.mockRestore();
  });

  test('should process projects successfully', async () => {
    // Arrange
    mockProcessorInstance.processProjects.mockResolvedValue([]);

    // Act
    await cloudEventHandler(mockEvent);

    // Assert
    expect(mockProcessorInstance.processProjects).toHaveBeenCalledTimes(1);
    expect(consoleLogSpy).toHaveBeenCalledWith('Received event:', 'test-event');
    expect(consoleLogSpy).toHaveBeenCalledWith('Processing completed successfully');
  });

  test('should handle errors and rethrow them', async () => {
    // Arrange
    const mockError = new Error('Processing failed');
    mockProcessorInstance.processProjects.mockRejectedValue(mockError);

    // Act & Assert
    await expect(cloudEventHandler(mockEvent)).rejects.toThrow('Processing failed');
    expect(mockProcessorInstance.processProjects).toHaveBeenCalledTimes(1);
    expect(consoleErrorSpy).toHaveBeenCalledWith('Error processing projects:', mockError);
  });
});
@@ -0,0 +1,277 @@
import { ProcessorService } from '../../services/processor-service';
import { ProjectService } from '../../services/project-service';
import { ProjectTestSpecsService } from '../../services/project-test-specs-service';
import { ProcessResult } from '../../types';
import * as path from 'path';

// Mock the shared-functions module
jest.mock('shared-functions', () => {
  return {
    RepositoryService: jest.fn().mockImplementation(() => {
      return {
        cloneMainRepository: jest.fn().mockResolvedValue('/mock/repo/path'),
        cloneProjectRepository: jest.fn().mockResolvedValue('/mock/project/repo/path'),
        createBranch: jest.fn().mockResolvedValue(undefined),
        commitChanges: jest.fn().mockResolvedValue(undefined),
        pushChanges: jest.fn().mockResolvedValue(undefined),
        generateGitPatch: jest.fn().mockResolvedValue('mock git patch')
      };
    }),
    PullRequestService: jest.fn().mockImplementation(() => {
      return {
        createPullRequest: jest.fn().mockResolvedValue('https://github.com/org/repo/pull/1')
      };
    }),
    GeminiService: jest.fn().mockImplementation(() => {
      return {
        generatePullRequestDescription: jest.fn().mockResolvedValue('Mock PR description')
      };
    })
  };
});

// Mock the ProjectService
jest.mock('../../services/project-service', () => {
  return {
    ProjectService: jest.fn().mockImplementation(() => {
      return {
        findProjects: jest.fn(),
        readProjectInfo: jest.fn(),
        readProjectGuidelines: jest.fn()
      };
    })
  };
});

// Mock the ProjectTestSpecsService
jest.mock('../../services/project-test-specs-service', () => {
  return {
    ProjectTestSpecsService: jest.fn().mockImplementation(() => {
      return {
        processProject: jest.fn()
      };
    })
  };
});

// Mock the config module
jest.mock('../../config', () => {
  return {
    validateConfig: jest.fn(),
    DRY_RUN_SKIP_COMMITS: false,
    DRY_RUN_SKIP_GEMINI: false,
    getGiteaCredentials: jest.fn().mockReturnValue({
      type: 'token',
      token: 'mock-gitea-token'
    }),
    getGithubCredentials: jest.fn().mockReturnValue({
      type: 'token',
      token: 'mock-github-token'
    }),
    getMainRepoCredentials: jest.fn().mockReturnValue({
      type: 'token',
      token: 'mock-main-repo-token'
    }),
    MAIN_REPO_URL: 'https://github.com/org/repo',
    USE_LOCAL_REPO: false,
    GOOGLE_CLOUD_PROJECT_ID: 'mock-project-id',
    GOOGLE_CLOUD_LOCATION: 'mock-location',
    GEMINI_MODEL: 'mock-model'
  };
});

describe('ProcessorService', () => {
  let processorService: ProcessorService;
  let mockProjectService: jest.Mocked<ProjectService>;
  let mockProjectTestSpecsService: jest.Mocked<ProjectTestSpecsService>;
  let consoleLogSpy: jest.SpyInstance;
  let consoleErrorSpy: jest.SpyInstance;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();

    // Spy on console methods
    consoleLogSpy = jest.spyOn(console, 'log').mockImplementation();
    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();

    // Create a new instance of the service
    processorService = new ProcessorService();

    // Get the mock instances and set up direct access to their methods
    mockProjectService = (processorService as any).projectService;

    // Get the mock ProjectTestSpecsService instance
    // We need to mock the constructor to return our mock instance
    const mockInstance = {
      processProject: jest.fn(),
      projectService: {},
      sharedRepositoryService: {},
      processTestSpec: jest.fn(),
      collectRelevantFiles: jest.fn(),
      generateImplementations: jest.fn()
    };
    (ProjectTestSpecsService as jest.Mock).mockImplementation(() => mockInstance);
    mockProjectTestSpecsService = mockInstance as unknown as jest.Mocked<ProjectTestSpecsService>;
  });

  afterEach(() => {
    // Restore console methods
    consoleLogSpy.mockRestore();
    consoleErrorSpy.mockRestore();
  });

  describe('processProjects', () => {
    test('should process projects successfully', async () => {
      // Arrange
      const mockProjects = [
        { name: 'project1', path: '/path/to/project1', repoUrl: 'https://github.com/org/project1', repoHost: 'github.com' },
        { name: 'project2', path: '/path/to/project2', repoUrl: 'https://gitea.example.com/org/project2', repoHost: 'gitea.example.com' }
      ];

      mockProjectService.findProjects.mockResolvedValue(mockProjects);

      const mockProcessResult1: ProcessResult = {
        project: mockProjects[0],
        success: true,
        filesWritten: ['file1.ts'],
        filesRemoved: [],
        gitPatch: 'mock git patch 1'
      };

      const mockProcessResult2: ProcessResult = {
        project: mockProjects[1],
        success: true,
        filesWritten: ['file2.ts'],
        filesRemoved: [],
        gitPatch: 'mock git patch 2'
      };

      mockProjectTestSpecsService.processProject
        .mockResolvedValueOnce(mockProcessResult1)
        .mockResolvedValueOnce(mockProcessResult2);

      // Act
      const results = await processorService.processProjects();

      // Assert
      expect(results).toHaveLength(2);
      expect(results[0].project.name).toBe('project1');
      expect(results[0].success).toBe(true);
      expect(results[0].pullRequestUrl).toBe('https://github.com/org/repo/pull/1');
      expect(results[1].project.name).toBe('project2');
      expect(results[1].success).toBe(true);
      expect(results[1].pullRequestUrl).toBe('https://github.com/org/repo/pull/1');
    });

    test('should handle errors when processing projects', async () => {
      // Arrange
      const mockProjects = [
        { name: 'project1', path: '/path/to/project1', repoUrl: 'https://github.com/org/project1', repoHost: 'github.com' },
        { name: 'project2', path: '/path/to/project2', repoUrl: 'https://gitea.example.com/org/project2', repoHost: 'gitea.example.com' }
      ];

      mockProjectService.findProjects.mockResolvedValue(mockProjects);

      const mockProcessResult1: ProcessResult = {
        project: mockProjects[0],
        success: true,
        filesWritten: ['file1.ts'],
        filesRemoved: [],
        gitPatch: 'mock git patch 1'
      };

      mockProjectTestSpecsService.processProject
        .mockResolvedValueOnce(mockProcessResult1)
        .mockRejectedValueOnce(new Error('Processing failed for project2'));

      // Act
      const results = await processorService.processProjects();

      // Assert
      expect(results).toHaveLength(2);
      expect(results[0].project.name).toBe('project1');
      expect(results[0].success).toBe(true);
      expect(results[1].project.name).toBe('project2');
      expect(results[1].success).toBe(false);
      expect(results[1].error).toBe('Processing failed for project2');
    });

    test('should handle project with no repository URL', async () => {
      // Arrange
      const mockProjects = [
        { name: 'project1', path: '/path/to/project1', repoHost: 'github.com' } // No repoUrl
      ];

      mockProjectService.findProjects.mockResolvedValue(mockProjects);

      // Act
      const results = await processorService.processProjects();

      // Assert
      expect(results).toHaveLength(1);
      expect(results[0].project.name).toBe('project1');
      expect(results[0].success).toBe(false);
      expect(results[0].error).toBe('No repository URL found');
    });

    test('should handle project with unsupported repository host', async () => {
      // Arrange
      const mockProjects = [
        { name: 'project1', path: '/path/to/project1', repoUrl: 'https://gitlab.com/org/project1', repoHost: 'gitlab.com' }
      ];

      mockProjectService.findProjects.mockResolvedValue(mockProjects);

      // Act
      const results = await processorService.processProjects();

      // Assert
      expect(results).toHaveLength(1);
      expect(results[0].project.name).toBe('project1');
      expect(results[0].success).toBe(false);
      expect(results[0].error).toBe('Unsupported repository host: gitlab.com');
    });

    test('should skip commits when DRY_RUN_SKIP_COMMITS is true', async () => {
      // This test is simpler to implement by directly testing the processProject method
      // with DRY_RUN_SKIP_COMMITS set to true

      // Arrange
      const mockProject = {
        name: 'project1',
        path: '/path/to/project1',
        repoUrl: 'https://github.com/org/project1',
        repoHost: 'github.com'
      };

      const mockProcessResult: ProcessResult = {
        project: mockProject,
        success: true,
        filesWritten: ['file1.ts'],
        filesRemoved: [],
        gitPatch: 'mock git patch'
      };

      // Mock the ProjectTestSpecsService to return a successful result
      mockProjectTestSpecsService.processProject.mockResolvedValue(mockProcessResult);

      // Mock the config module
      const configModule = require('../../config');
      const originalDryRunSkipCommits = configModule.DRY_RUN_SKIP_COMMITS;
      configModule.DRY_RUN_SKIP_COMMITS = true;

      try {
        // Act
        const result = await processorService.processProject(mockProject, '/mock/repo/path');

        // Assert
        expect(result.success).toBe(true);
        expect(result.pullRequestUrl).toContain('(DRY RUN)');
      } finally {
        // Restore the original value
        configModule.DRY_RUN_SKIP_COMMITS = originalDryRunSkipCommits;
      }
    });
  });
});
@@ -0,0 +1,123 @@
import { ProjectService } from '../../services/project-service';
import { Project } from 'shared-functions';

// Mock the shared-functions module
jest.mock('shared-functions', () => {
  return {
    ProjectService: jest.fn().mockImplementation(() => {
      return {
        findProjects: jest.fn(),
        readProjectInfo: jest.fn(),
        readProjectGuidelines: jest.fn()
      };
    })
  };
});

describe('ProjectService', () => {
  let projectService: ProjectService;
  let mockSharedProjectService: any;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();

    // Create a new instance of the service
    projectService = new ProjectService();

    // Get the mock SharedProjectService instance
    mockSharedProjectService = (projectService as any).sharedProjectService;
  });

  describe('findProjects', () => {
    test('should delegate to SharedProjectService.findProjects with correct parameters', async () => {
      // Arrange
      const promptsDir = '/path/to/prompts';
      const mockProjects: Project[] = [
        { name: 'project1', path: '/path/to/project1' },
        { name: 'project2', path: '/path/to/project2' }
      ];

      mockSharedProjectService.findProjects.mockResolvedValue(mockProjects);

      // Act
      const result = await projectService.findProjects(promptsDir);

      // Assert
      expect(mockSharedProjectService.findProjects).toHaveBeenCalledWith(
        promptsDir,
        'test-spec-to-test-implementation'
      );
      expect(result).toEqual(mockProjects);
    });

    test('should handle errors from SharedProjectService.findProjects', async () => {
      // Arrange
      const promptsDir = '/path/to/prompts';
      const mockError = new Error('Failed to find projects');

      mockSharedProjectService.findProjects.mockRejectedValue(mockError);

      // Act & Assert
      await expect(projectService.findProjects(promptsDir)).rejects.toThrow('Failed to find projects');
    });
  });

  describe('readProjectInfo', () => {
    test('should delegate to SharedProjectService.readProjectInfo with correct parameters', async () => {
      // Arrange
      const projectPath = '/path/to/project';
      const projectName = 'project1';
      const mockProject: Project = { name: projectName, path: projectPath };

      mockSharedProjectService.readProjectInfo.mockResolvedValue(mockProject);

      // Act
      const result = await projectService.readProjectInfo(projectPath, projectName);

      // Assert
      expect(mockSharedProjectService.readProjectInfo).toHaveBeenCalledWith(projectPath, projectName);
      expect(result).toEqual(mockProject);
    });

    test('should handle errors from SharedProjectService.readProjectInfo', async () => {
      // Arrange
      const projectPath = '/path/to/project';
      const projectName = 'project1';
      const mockError = new Error('Failed to read project info');

      mockSharedProjectService.readProjectInfo.mockRejectedValue(mockError);

      // Act & Assert
      await expect(projectService.readProjectInfo(projectPath, projectName)).rejects.toThrow('Failed to read project info');
    });
  });

  describe('readProjectGuidelines', () => {
    test('should delegate to SharedProjectService.readProjectGuidelines with correct parameters', async () => {
      // Arrange
      const projectPath = '/path/to/project';
      const mockGuidelines = 'Project guidelines content';

      mockSharedProjectService.readProjectGuidelines.mockResolvedValue(mockGuidelines);

      // Act
      const result = await projectService.readProjectGuidelines(projectPath);

      // Assert
      expect(mockSharedProjectService.readProjectGuidelines).toHaveBeenCalledWith(projectPath);
      expect(result).toEqual(mockGuidelines);
    });

    test('should handle errors from SharedProjectService.readProjectGuidelines', async () => {
      // Arrange
      const projectPath = '/path/to/project';
      const mockError = new Error('Failed to read project guidelines');

      mockSharedProjectService.readProjectGuidelines.mockRejectedValue(mockError);

      // Act & Assert
      await expect(projectService.readProjectGuidelines(projectPath)).rejects.toThrow('Failed to read project guidelines');
    });
  });
});
@@ -0,0 +1,311 @@
import {ProjectTestSpecsService} from '../../services/project-test-specs-service';
import {ProjectService} from '../../services/project-service';
import {ProcessResult, TestSpecImplementationStatus} from '../../types';
import * as fs from 'fs';
import * as path from 'path';

// Mock the fs module
jest.mock('fs', () => {
  return {
    ...jest.requireActual('fs'),
    existsSync: jest.fn(),
    readFileSync: jest.fn(),
    readdirSync: jest.fn(),
    statSync: jest.fn()
  };
});

// Mock the path module
jest.mock('path', () => {
  const originalPath = jest.requireActual('path');
  return {
    ...originalPath,
    join: jest.fn().mockImplementation((...args) => args.join('/')),
    relative: jest.fn().mockImplementation((from, to) => to.replace(from + '/', ''))
  };
});

// Mock the ProjectService
jest.mock('../../services/project-service', () => {
  return {
    ProjectService: jest.fn().mockImplementation(() => {
      return {
        findProjects: jest.fn(),
        readProjectInfo: jest.fn(),
        readProjectGuidelines: jest.fn()
      };
    })
  };
});

// Mock the shared-functions module
jest.mock('shared-functions', () => {
  return {
    RepositoryService: jest.fn().mockImplementation(() => {
      return {
        generateGitPatch: jest.fn().mockResolvedValue('mock git patch')
      };
    }),
    GeminiFileSystemService: jest.fn().mockImplementation(() => {
      return {
        processModelStream: jest.fn()
      };
    })
  };
});

// Mock the config module
jest.mock('../../config', () => {
  return {
    DRY_RUN_SKIP_GEMINI: false,
    GOOGLE_CLOUD_PROJECT_ID: 'mock-project-id',
    GOOGLE_CLOUD_LOCATION: 'mock-location',
    GEMINI_MODEL: 'mock-model'
  };
});

describe('ProjectTestSpecsService', () => {
  let projectTestSpecsService: ProjectTestSpecsService;
  let mockProjectService: jest.Mocked<ProjectService>;
  let mockSharedRepositoryService: any;
  let mockGeminiFileSystemService: any;
  let consoleLogSpy: jest.SpyInstance;
  let consoleErrorSpy: jest.SpyInstance;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();

    // Spy on console methods
    consoleLogSpy = jest.spyOn(console, 'log').mockImplementation();
    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();

    // Create a new instance of the service
    projectTestSpecsService = new ProjectTestSpecsService();

    // Get the mock instances
    mockProjectService = (projectTestSpecsService as any).projectService;
    mockSharedRepositoryService = (projectTestSpecsService as any).sharedRepositoryService;

    // Mock the GeminiFileSystemService
    const {GeminiFileSystemService} = require('shared-functions');
    mockGeminiFileSystemService = new GeminiFileSystemService();
  });

  afterEach(() => {
    // Restore console methods
    consoleLogSpy.mockRestore();
    consoleErrorSpy.mockRestore();
  });

  describe('processProject', () => {
    test('should process project successfully', async () => {
      // Arrange
      const project = {name: 'project1', path: '/path/to/project1'};
      const projectRepoPath = '/path/to/project/repo';
      const projectGuidelines = 'Project guidelines content';

      mockProjectService.readProjectGuidelines.mockResolvedValue(projectGuidelines);

      // Mock the generateImplementation method
      const mockImplementationResult = {
        text: 'Generated implementation',
        decision: {decision: 'create' as TestSpecImplementationStatus, reason: 'New test spec'},
        filesWritten: ['file1.ts', 'file2.ts'],
        filesDeleted: []
      };

      // Mock the private methods using spyOn
      jest.spyOn(projectTestSpecsService as any, 'processTestSpec').mockResolvedValue({
        project,
        success: true,
        filesWritten: mockImplementationResult.filesWritten,
        filesRemoved: mockImplementationResult.filesDeleted
      });

      mockSharedRepositoryService.generateGitPatch.mockResolvedValue('mock git patch');

      // Act
      const result = await projectTestSpecsService.processProject(project, projectRepoPath);

      // Assert
      expect(mockProjectService.readProjectGuidelines).toHaveBeenCalledWith(project.path);
      expect(result.success).toBe(true);
      expect(result.filesWritten).toEqual(mockImplementationResult.filesWritten);
      expect(result.filesRemoved).toEqual(mockImplementationResult.filesDeleted);
      expect(result.gitPatch).toBe('mock git patch');
    });

    test('should handle errors when processing project', async () => {
      // Arrange
      const project = {name: 'project1', path: '/path/to/project1'};
      const projectRepoPath = '/path/to/project/repo';

      mockProjectService.readProjectGuidelines.mockRejectedValue(new Error('Failed to read guidelines'));

      // Act
      const result = await projectTestSpecsService.processProject(project, projectRepoPath);

      // Assert
      expect(result.success).toBe(false);
      expect(result.error).toBe('Failed to read guidelines');
    });

    test('should handle errors when generating git patch', async () => {
      // Arrange
      const project = {name: 'project1', path: '/path/to/project1'};
      const projectRepoPath = '/path/to/project/repo';
      const projectGuidelines = 'Project guidelines content';

      mockProjectService.readProjectGuidelines.mockResolvedValue(projectGuidelines);

      // Mock the processTestSpec method
      jest.spyOn(projectTestSpecsService as any, 'processTestSpec').mockResolvedValue({
        project,
        success: true,
        filesWritten: ['file1.ts'],
        filesRemoved: []
      });

      mockSharedRepositoryService.generateGitPatch.mockRejectedValue(new Error('Failed to generate git patch'));

      // Act
      const result = await projectTestSpecsService.processProject(project, projectRepoPath);

      // Assert
      expect(result.success).toBe(true);
      expect(result.filesWritten).toEqual(['file1.ts']);
      expect(result.gitPatch).toBeUndefined();
    });
  });

  describe('collectRelevantFiles', () => {
    test('should collect relevant files from project directory', async () => {
      // Arrange
      const project = {name: 'project1', path: '/path/to/project1'};
      const projectRepoPath = '/path/to/project/repo';

      // Mock fs.existsSync to return true for specific files
      (fs.existsSync as jest.Mock).mockImplementation((filePath) => {
        if (filePath.includes('nitro-it/src/test/java/be/fiscalteam/nitro/bdd')) return true;
        return filePath.includes('INFO.md') || filePath.includes('README.md');
      });

      // Mock fs.readFileSync to return file content
      (fs.readFileSync as jest.Mock).mockImplementation((filePath) => {
        if (filePath.includes('INFO.md')) return 'INFO.md content';
        if (filePath.includes('README.md')) return 'README.md content';
        return '';
      });

      // Act
      const result = await (projectTestSpecsService as any).collectRelevantFiles(project, projectRepoPath);

      // Assert
      expect(Object.keys(result)).toContain('INFO.md');
      expect(Object.keys(result)).toContain('README.md');
      expect(result['INFO.md']).toBe('INFO.md content');
      expect(result['README.md']).toBe('README.md content');
    });

    test('should handle errors when collecting relevant files', async () => {
      // Arrange
      const project = {name: 'project1', path: '/path/to/project1'};
      const projectRepoPath = '/path/to/project/repo';

      // Mock fs.existsSync to throw an error
      (fs.existsSync as jest.Mock).mockImplementation(() => {
        throw new Error('File system error');
      });

      // Act
      const result = await (projectTestSpecsService as any).collectRelevantFiles(project, projectRepoPath);

      // Assert
      expect(result).toEqual({});
      expect(consoleErrorSpy).toHaveBeenCalled();
    });
  });

  describe('generateImplementation', () => {
    test('should generate implementation using Gemini', async () => {
      // Arrange
      const projectRepoPath = '/path/to/project/repo';
      const guidelines = 'Project guidelines content';
      const relevantFiles = {
        'INFO.md': 'INFO.md content',
        'README.md': 'README.md content'
      };

      // Mock GeminiFileSystemService.processModelStream
      const mockProcessModelStreamResult = {
        text: 'Generated implementation',
        decision: {decision: 'create', reason: 'New test spec'},
        filesWritten: ['file1.ts', 'file2.ts'],
        filesDeleted: []
      };

      mockGeminiFileSystemService.processModelStream.mockResolvedValue(mockProcessModelStreamResult);

      // Mock the GeminiFileSystemService constructor
      const {GeminiFileSystemService} = require('shared-functions');
      GeminiFileSystemService.mockImplementation(() => mockGeminiFileSystemService);

      // Act
      const result = await (projectTestSpecsService as any).generateImplementations(
        projectRepoPath,
        guidelines,
        relevantFiles
      );

      // Assert
      expect(mockGeminiFileSystemService.processModelStream).toHaveBeenCalledWith(
        guidelines,
        expect.stringContaining('INFO.md'),
        projectRepoPath
      );
      expect(result.text).toBe('Generated implementation');
      expect(result.decision.decision).toBe('create');
      expect(result.filesWritten).toEqual(['file1.ts', 'file2.ts']);
      expect(result.filesDeleted).toEqual([]);
    });

    test('should return mock implementation when DRY_RUN_SKIP_GEMINI is true', async () => {
      // Arrange
      const projectRepoPath = '/path/to/project/repo';
      const guidelines = 'Project guidelines content';
      const relevantFiles = {
        'INFO.md': 'INFO.md content',
        'README.md': 'README.md content'
      };

      // Mock the config module to set DRY_RUN_SKIP_GEMINI to true
      jest.resetModules();
      jest.doMock('../../config', () => {
        return {
          DRY_RUN_SKIP_GEMINI: true,
          GOOGLE_CLOUD_PROJECT_ID: 'mock-project-id',
          GOOGLE_CLOUD_LOCATION: 'mock-location',
          GEMINI_MODEL: 'mock-model'
        };
      });

      // Re-import the service to get the updated config
      const {ProjectTestSpecsService} = require('../../services/project-test-specs-service');
      const serviceWithDryRun = new ProjectTestSpecsService();

      // Act
      const result = await (serviceWithDryRun).generateImplementations(
        projectRepoPath,
        guidelines,
        relevantFiles
      );

      // Assert
      expect(result.text).toContain('DRY RUN');
      expect(result.decision.decision).toBe('create');
      expect(result.filesWritten).toEqual([]);
      expect(result.filesDeleted).toEqual([]);
    });
  });
});
@@ -0,0 +1,16 @@
/**
 * Jest setup file
 *
 * This file is executed before each test file is run.
 * It can be used to set up global test environment configurations.
 */

// Suppress console output during tests
global.console = {
  ...console,
  log: jest.fn(),
  error: jest.fn(),
  warn: jest.fn(),
  info: jest.fn(),
  debug: jest.fn(),
};
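Note (not part of this commit): the setup file above replaces the global console methods with jest.fn() stubs, while the test files in this commit additionally re-spy on console.log / console.error per test so they can assert on specific calls and restore them afterwards. A minimal sketch of that pattern, with illustrative names only:

// Sketch only: asserting on console output when setup.ts has already stubbed console.
let logSpy: jest.SpyInstance;

beforeEach(() => {
  // Wrap console.log so calls can be inspected without printing anything.
  logSpy = jest.spyOn(console, 'log').mockImplementation();
});

afterEach(() => {
  logSpy.mockRestore();
});

test('logs a completion message', () => {
  console.log('Processing completed successfully');
  expect(logSpy).toHaveBeenCalledWith('Processing completed successfully');
});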
116  src/functions/test-spec-to-test-implementation/src/config.ts  Normal file
@@ -0,0 +1,116 @@
/**
 * Configuration module for loading environment variables
 */
import * as dotenv from 'dotenv';
import * as path from 'path';

// Load environment variables from .env file
dotenv.config({ path: path.resolve(__dirname, '../.env') });

// Main repository configuration
export const MAIN_REPO_URL = process.env.MAIN_REPO_URL || '';
export const MAIN_REPO_TOKEN = process.env.MAIN_REPO_TOKEN;
export const MAIN_REPO_USERNAME = process.env.MAIN_REPO_USERNAME;
export const MAIN_REPO_PASSWORD = process.env.MAIN_REPO_PASSWORD;

// GitHub credentials
export const GITHUB_TOKEN = process.env.GITHUB_TOKEN;
export const GITHUB_USERNAME = process.env.GITHUB_USERNAME;
export const GITHUB_PASSWORD = process.env.GITHUB_PASSWORD;

// Gitea credentials
export const GITEA_USERNAME = process.env.GITEA_USERNAME;
export const GITEA_PASSWORD = process.env.GITEA_PASSWORD;

// Google Cloud configuration
export const GOOGLE_CLOUD_PROJECT_ID = process.env.GOOGLE_CLOUD_PROJECT_ID || '';
export const GOOGLE_CLOUD_LOCATION = process.env.GOOGLE_CLOUD_LOCATION || 'us-central1';
export const GEMINI_MODEL = process.env.GEMINI_MODEL || 'gemini-1.5-pro';
export const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY;

// Function configuration
export const DEBUG = process.env.DEBUG === 'true';
export const USE_LOCAL_REPO = process.env.USE_LOCAL_REPO === 'true';
export const DRY_RUN_SKIP_GEMINI = process.env.DRY_RUN_SKIP_GEMINI === 'true';
export const DRY_RUN_SKIP_COMMITS = process.env.DRY_RUN_SKIP_COMMITS === 'true';

// Validate required configuration
export function validateConfig(): void {
  const missingVars: string[] = [];

  // Only check for main repo URL and credentials if not using local repo
  if (!USE_LOCAL_REPO) {
    if (!MAIN_REPO_URL) {
      missingVars.push('MAIN_REPO_URL');
    }

    if (!MAIN_REPO_TOKEN && (!MAIN_REPO_USERNAME || !MAIN_REPO_PASSWORD)) {
      missingVars.push('MAIN_REPO_TOKEN or MAIN_REPO_USERNAME/MAIN_REPO_PASSWORD');
    }
  }

  if (!GOOGLE_CLOUD_PROJECT_ID) {
    missingVars.push('GOOGLE_CLOUD_PROJECT_ID');
  }

  if (missingVars.length > 0) {
    throw new Error(`Missing required environment variables: ${missingVars.join(', ')}`);
  }
}

// Get repository credentials for the main repository
export function getMainRepoCredentials(): { type: 'username-password' | 'token'; username?: string; password?: string; token?: string } {
  if (USE_LOCAL_REPO) {
    // Return dummy credentials when using local repo
    return {
      type: 'token',
      token: 'dummy-token-for-local-repo'
    };
  }

  if (MAIN_REPO_TOKEN) {
    return {
      type: 'token',
      token: MAIN_REPO_TOKEN
    };
  } else if (MAIN_REPO_USERNAME && MAIN_REPO_PASSWORD) {
    return {
      type: 'username-password',
      username: MAIN_REPO_USERNAME,
      password: MAIN_REPO_PASSWORD
    };
  }

  throw new Error('No credentials available for the main repository');
}

// Get GitHub credentials
export function getGithubCredentials(): { type: 'username-password' | 'token'; username?: string; password?: string; token?: string } | undefined {
  if (GITHUB_TOKEN) {
    return {
      type: 'token',
      token: GITHUB_TOKEN
    };
  } else if (GITHUB_USERNAME && GITHUB_PASSWORD) {
    return {
      type: 'username-password',
      username: GITHUB_USERNAME,
      password: GITHUB_PASSWORD
    };
  }

  return undefined;
}

// Get Gitea credentials
export function getGiteaCredentials(): { type: 'username-password'; username: string; password: string } | undefined {
  if (GITEA_USERNAME && GITEA_PASSWORD) {
    return {
      type: 'username-password',
      username: GITEA_USERNAME,
      password: GITEA_PASSWORD
    };
  }

  return undefined;
}
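Note (not part of this commit): the exported values above are computed once at import time, and the boolean flags compare the raw string against the literal 'true', so DEBUG=1 or DEBUG=TRUE is treated as false. A minimal usage sketch, assuming the module is imported from a sibling file as './config':

// Sketch only: consuming the config module from a caller in src/.
import { validateConfig, getMainRepoCredentials, USE_LOCAL_REPO } from './config';

// Throws if MAIN_REPO_URL, repo credentials, or GOOGLE_CLOUD_PROJECT_ID are missing;
// the repo checks are skipped when USE_LOCAL_REPO=true.
validateConfig();

const creds = getMainRepoCredentials();
if (creds.type === 'token') {
  // Token auth: MAIN_REPO_TOKEN, or the dummy token returned when USE_LOCAL_REPO=true.
} else {
  // Username/password auth: MAIN_REPO_USERNAME / MAIN_REPO_PASSWORD.
}
console.log(`local repo mode: ${USE_LOCAL_REPO}`);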
86  src/functions/test-spec-to-test-implementation/src/index.ts  Normal file
@@ -0,0 +1,86 @@
import {CloudEvent, cloudEvent, http} from '@google-cloud/functions-framework';
import {ProcessorService} from './services/processor-service';
import {validateConfig, DRY_RUN_SKIP_GEMINI, DRY_RUN_SKIP_COMMITS} from './config';
import {ProcessResult, HttpResponse, ProjectSummary} from './types';

// Validate configuration on startup
try {
  validateConfig();
} catch (error) {
  console.error('Configuration error:', error instanceof Error ? error.message : String(error));
  // Don't throw here to allow the function to start, but it will fail when executed
}

/**
 * Format process results into a concise HTTP response
 * @param results Process results from the processor service
 * @returns Formatted HTTP response
 */
export function formatHttpResponse(results: ProcessResult[]): HttpResponse {
  // Count successful and failed projects
  const projectsSucceeded = results.filter(r => !r.error).length;
  const projectsFailed = results.filter(r => !!r.error).length;

  // Find main PR URL if any
  const mainPullRequestUrl = results.find(r => r.pullRequestUrl)?.pullRequestUrl;

  // Format project summaries
  const projects: ProjectSummary[] = results.map(result => {
    return {
      name: result.project.name,
      success: result.success ?? false,
      error: result.error,
      filesWritten: result.filesWritten?.length ?? 0,
      filesRemoved: result.filesRemoved?.length ?? 0,
      pullRequestUrl: result.pullRequestUrl,
    };
  });

  return {
    success: projectsFailed === 0,
    projectsProcessed: results.length,
    projectsSucceeded,
    projectsFailed,
    mainPullRequestUrl,
    projects
  };
}

/**
 * HTTP endpoint for the test-spec-to-test-implementation function
 */
http('testSpecToTestImplementationHttp', async (req, res): Promise<void> => {
  try {
    const processor = new ProcessorService();
    const results = await processor.processProjects();
    const response = formatHttpResponse(results);

    res.status(200).json(response);
  } catch (error) {
    console.error('Error processing projects:', error);
    const errorMessage = error instanceof Error ? error.message : String(error);
    res.status(500).json({
      success: false,
      projectsProcessed: 0,
      projectsSucceeded: 0,
      projectsFailed: 1,
      projects: [],
      error: errorMessage
    });
  }
});

/**
 * Cloud Event handler for the test-spec-to-test-implementation function
 */
cloudEvent('testSpecToTestImplementationEvent', async (event: CloudEvent<any>): Promise<void> => {
  try {
    console.log('Received event:', event.type);
    const processor = new ProcessorService();
    await processor.processProjects();
    console.log('Processing completed successfully');
  } catch (error) {
    console.error('Error processing projects:', error);
    throw error;
  }
});
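Note (not part of this commit): a minimal sketch of exercising the HTTP entry point after starting it locally with `npm run dev` (functions-framework on port 18081, per package.json). The response fields shown are the ones produced by formatHttpResponse above; the full HttpResponse type lives in src/types.ts, which is not included in this diff, and the root path and request method are assumptions about how functions-framework exposes the target.

// Sketch only: call the locally running function and summarize the result (Node 20 global fetch).
async function checkRun(): Promise<void> {
  const res = await fetch('http://localhost:18081/');
  const body = (await res.json()) as {
    success: boolean;
    projectsProcessed: number;
    projectsSucceeded: number;
    projectsFailed: number;
    mainPullRequestUrl?: string;
    projects: Array<{ name: string; success: boolean; filesWritten: number; filesRemoved: number }>;
  };
  console.log(`processed=${body.projectsProcessed} succeeded=${body.projectsSucceeded} failed=${body.projectsFailed}`);
}

checkRun().catch(err => console.error(err));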
@ -0,0 +1,263 @@
/**
 * Service for orchestrating the entire process
 */
import * as path from 'path';
import * as os from 'os';
import {ProcessResult, RepoCredentials} from '../types';
import {
    RepositoryService as SharedRepositoryService,
    PullRequestService as SharedPullRequestService,
    GeminiService, Project
} from 'shared-functions';
import {ProjectService} from './project-service';
import {ProjectTestSpecsService} from './project-test-specs-service';
import {
    DRY_RUN_SKIP_COMMITS,
    getGiteaCredentials,
    getGithubCredentials,
    getMainRepoCredentials,
    MAIN_REPO_URL,
    USE_LOCAL_REPO,
    validateConfig,
    GOOGLE_CLOUD_PROJECT_ID,
    GOOGLE_CLOUD_LOCATION,
    GEMINI_MODEL,
    DRY_RUN_SKIP_GEMINI
} from '../config';

export class ProcessorService {
    private sharedRepositoryService: SharedRepositoryService;
    private projectService: ProjectService;
    private sharedPullRequestService: SharedPullRequestService;
    private geminiService: GeminiService;
    private mainRepoUrl: string;
    private mainRepoCredentials: RepoCredentials;
    private giteaCredentials?: RepoCredentials;
    private githubCredentials?: RepoCredentials;

    constructor() {
        // Validate configuration
        validateConfig();

        // Initialize services
        const repoBaseDir = path.join(os.tmpdir(), 'test-spec-to-test-implementation');
        this.sharedRepositoryService = new SharedRepositoryService(repoBaseDir);
        this.projectService = new ProjectService();
        this.sharedPullRequestService = new SharedPullRequestService();
        this.geminiService = new GeminiService(
            GOOGLE_CLOUD_PROJECT_ID,
            GOOGLE_CLOUD_LOCATION,
            GEMINI_MODEL,
            DRY_RUN_SKIP_GEMINI
        );

        // Get main repository URL and credentials only if not using local repo
        if (!USE_LOCAL_REPO) {
            this.mainRepoUrl = MAIN_REPO_URL;
            this.mainRepoCredentials = getMainRepoCredentials();
        } else {
            // Set dummy values when using local repo
            this.mainRepoUrl = '';
            this.mainRepoCredentials = getMainRepoCredentials();
        }

        // Initialize other credentials
        this.githubCredentials = getGithubCredentials();
        this.giteaCredentials = getGiteaCredentials();
    }

    /**
     * Get credentials for a project based on its repository host
     * @param project Project information
     * @returns Credentials for the project repository
     */
    private getCredentialsForProject(project: Project): RepoCredentials {
        if (!project.repoHost) {
            throw new Error(`Repository host not found for project ${project.name}`);
        }

        if (project.repoHost.includes('github.com')) {
            if (!this.githubCredentials) {
                throw new Error('GitHub credentials not found');
            }
            return this.githubCredentials;
        } else if (project.repoHost.includes('gitea')) {
            if (!this.giteaCredentials) {
                throw new Error('Gitea credentials not found');
            }
            return this.giteaCredentials;
        } else {
            throw new Error(`Unsupported repository host: ${project.repoHost}`);
        }
    }

    /**
     * Process all projects in the main repository
     * @returns Array of process results
     */
    async processProjects(): Promise<ProcessResult[]> {
        const results: ProcessResult[] = [];
        let mainRepoPath: string;

        try {
            // Use local repository or clone the main repository
            if (USE_LOCAL_REPO) {
                console.log('Using local repository path');
                // When running with functions-framework, we need to navigate up to the project root
                // Check if we're in the test-spec-to-test-implementation directory and navigate up if needed
                const currentDir = process.cwd();
                if (currentDir.endsWith('test-spec-to-test-implementation')) {
                    mainRepoPath = path.resolve(currentDir, '../../..');
                } else {
                    mainRepoPath = currentDir;
                }
                console.log(`Resolved local repository path: ${mainRepoPath}`);
            } else {
                console.log(`Cloning main repository: ${this.mainRepoUrl}`);
                mainRepoPath = await this.sharedRepositoryService.cloneMainRepository(
                    this.mainRepoUrl,
                    this.mainRepoCredentials
                );
            }

            // Find all projects in the test-spec-to-test-implementation directory
            const promptsDir = path.join(mainRepoPath, 'src', 'prompts', 'test-spec-to-test-implementation');
            console.log(`Finding projects in: ${promptsDir}`);
            const projects = await this.projectService.findProjects(promptsDir);

            console.log(`Found ${projects.length} projects`);

            // Log details of each project
            if (projects.length > 0) {
                console.log('Projects found:');
                projects.forEach((project, index) => {
                    console.log(`  ${index + 1}. ${project.name} (${project.path})`);
                });
            } else {
                console.log('No projects found. Check if the test-spec-to-test-implementation directory exists and contains project folders.');
            }

            // Process each project
            console.log('Starting to process projects...');
            for (const project of projects) {
                try {
                    console.log(`Starting processing of project: ${project.name}`);
                    const result = await this.processProject(project, mainRepoPath);
                    console.log(`Finished processing project: ${project.name}`);
                    results.push(result);
                } catch (error) {
                    console.error(`Error processing project ${project.name}:`, error);
                    results.push({
                        project,
                        success: false,
                        error: error instanceof Error ? error.message : String(error)
                    });
                }
            }
            console.log(`Finished processing all ${projects.length} projects`);

            return results;
        } catch (error) {
            console.error('Error processing projects:', error);
            throw error;
        }
    }

    /**
     * Process a single project
     * @param project Project information
     * @param mainRepoPath Path to the main repository
     * @returns Process result
     */
    async processProject(project: Project, mainRepoPath: string): Promise<ProcessResult> {
        console.log(`Processing project: ${project.name}`);

        // Skip if no repository URL
        if (!project.repoUrl) {
            console.log(`Skipping project ${project.name}: No repository URL found`);
            return {
                project,
                success: false,
                error: "No repository URL found"
            };
        }

        try {
            // Get credentials for the project
            const credentials = this.getCredentialsForProject(project);

            // Clone the project repository
            console.log(`Cloning project repository: ${project.repoUrl}`);
            const projectRepoPath = await this.sharedRepositoryService.cloneProjectRepository(project, credentials);

            // Create a ProjectTestSpecsService to handle the project
            const projectTestSpecsService = new ProjectTestSpecsService();

            // Process test specs within the project
            console.log(`Processing test specs within project: ${project.name}`);
            const result = await projectTestSpecsService.processProject(project, projectRepoPath);

            // If no test specs were processed or there was an error, return early
            if (!result.success || result.error) {
                console.error(`Failure for project ${project.name}: ${result.error}`);
                return result;
            }

            // Skip creating commits/PRs if dry run is enabled
            if (DRY_RUN_SKIP_COMMITS) {
                console.log(`[DRY RUN] Skipping commit and PR creation for project ${project.name}`);
                return {
                    ...result,
                    success: true,
                    pullRequestUrl: 'https://example.com/mock-pr-url (DRY RUN)'
                };
            }

            // Create a new branch for changes
            const branchName = `implement-test-specs-${new Date().toISOString().split('T')[0]}`;
            await this.sharedRepositoryService.createBranch(projectRepoPath, branchName);

            // Commit changes
            await this.sharedRepositoryService.commitChanges(
                projectRepoPath,
                `Implement test specs: ${new Date().toISOString().split('T')[0]}`
            );

            // Push changes
            await this.sharedRepositoryService.pushChanges(projectRepoPath, branchName, credentials);

            // Generate PR description using Gemini
            const description = await this.geminiService.generatePullRequestDescription(
                "Test spec implementation",
                result.gitPatch
            );

            // Generate PR title
            const title = `Implement test specs: ${new Date().toISOString().split('T')[0]}`;

            // Create pull request
            const pullRequestUrl = await this.sharedPullRequestService.createPullRequest(
                project,
                branchName,
                credentials,
                title,
                description
            );

            console.log(`Created pull request: ${pullRequestUrl}`);

            return {
                ...result,
                success: true,
                pullRequestUrl
            };
        } catch (error) {
            console.error(`Error processing project ${project.name}:`, error);
            return {
                project,
                success: false,
                error: error instanceof Error ? error.message : String(error)
            };
        }
    }
}
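A minimal usage sketch of the service above, e.g. from a local script; the import path and the entry-point wrapper are assumptions for illustration, not part of this commit:

import {ProcessorService} from './services/processor-service'; // path assumed for illustration

async function main(): Promise<void> {
    // Constructor validates configuration and wires the shared services
    const processor = new ProcessorService();
    // Clones (or reuses the local) main repository, then processes each project found
    const results = await processor.processProjects();
    const failed = results.filter(r => !r.success);
    console.log(`Processed ${results.length} project(s), ${failed.length} failed`);
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});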
@ -0,0 +1,40 @@
/**
 * Service for handling project operations
 */
import {Project, ProjectService as SharedProjectService} from 'shared-functions';

export class ProjectService {
    private sharedProjectService: SharedProjectService;

    constructor() {
        this.sharedProjectService = new SharedProjectService();
    }

    /**
     * Find all projects in the test-spec-to-test-implementation directory
     * @param promptsDir Path to the test-spec-to-test-implementation directory
     * @returns Array of projects
     */
    async findProjects(promptsDir: string): Promise<Project[]> {
        return this.sharedProjectService.findProjects(promptsDir, 'test-spec-to-test-implementation');
    }

    /**
     * Read project information from INFO.md
     * @param projectPath Path to the project directory
     * @param projectName Name of the project
     * @returns Project information
     */
    async readProjectInfo(projectPath: string, projectName: string): Promise<Project> {
        return this.sharedProjectService.readProjectInfo(projectPath, projectName);
    }

    /**
     * Read AI guidelines for a project
     * @param projectPath Path to the project directory
     * @returns AI guidelines content
     */
    async readProjectGuidelines(projectPath: string): Promise<string> {
        return this.sharedProjectService.readProjectGuidelines(projectPath);
    }
}
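As a sketch of how this wrapper is driven (the prompts directory mirrors the one built in ProcessorService; the import path and values are illustrative assumptions):

import * as path from 'path';
import {ProjectService} from './project-service'; // path assumed for illustration

async function listProjects(mainRepoPath: string): Promise<void> {
    const projectService = new ProjectService();
    // Each project folder is expected to carry an INFO.md with fields such as
    // "Repo host", "Repo url", "Target branch", "AI guidelines" and "Jira component"
    const promptsDir = path.join(mainRepoPath, 'src', 'prompts', 'test-spec-to-test-implementation');
    const projects = await projectService.findProjects(promptsDir);
    projects.forEach(p => console.log(`${p.name}: ${p.repoUrl ?? 'no repo url'}`));
}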
@ -0,0 +1,215 @@
/**
 * Service for handling test spec operations within a project
 */
import * as fs from 'fs';
import * as path from 'path';
import {ProcessResult, TestSpecImplementationStatus} from '../types';
import {ProjectService} from './project-service';
import {DRY_RUN_SKIP_GEMINI} from '../config';
import {GeminiFileSystemService, Project, RepositoryService as SharedRepositoryService} from 'shared-functions';

export class ProjectTestSpecsService {
    private projectService: ProjectService;
    private sharedRepositoryService: SharedRepositoryService;

    constructor() {
        this.projectService = new ProjectService();
        this.sharedRepositoryService = new SharedRepositoryService(
            path.join(require('os').tmpdir(), 'test-spec-to-test-implementation')
        );
    }

    /**
     * Process the project test specs
     * @param project Project to process
     * @param projectRepoPath Path to the project repository
     * @returns Process result
     */
    async processProject(project: Project, projectRepoPath: string): Promise<ProcessResult> {
        console.log(`ProjectTestSpecsService: Processing project ${project.name}`);

        try {
            // Read project guidelines
            const projectGuidelines = await this.projectService.readProjectGuidelines(project.path);

            const result = await this.processTestSpec(project, projectRepoPath, projectGuidelines);

            // Generate git patch if any files were written
            let gitPatch: string | undefined = undefined;

            if ((result.filesWritten?.length ?? 0) > 0) {
                try {
                    console.log(`Generating git patch for project ${project.name} with ${result.filesWritten?.length} files written`);
                    gitPatch = await this.sharedRepositoryService.generateGitPatch(projectRepoPath);
                } catch (error) {
                    console.error(`Error generating git patch for project ${project.name}:`, error);
                }
            }

            return {
                ...result,
                gitPatch
            };
        } catch (error) {
            console.error(`Error processing project ${project.name}:`, error);
            return {
                project: project,
                success: false,
                error: error instanceof Error ? error.message : String(error)
            };
        }
    }

    /**
     * Process the test specs of a project using Gemini
     * @param project Project containing the test specs
     * @param projectRepoPath Path to the project repository
     * @param projectGuidelines Project guidelines
     * @returns Result of the processing
     */
    private async processTestSpec(
        project: Project,
        projectRepoPath: string,
        projectGuidelines: string
    ): Promise<ProcessResult> {
        try {
            // Collect all relevant files from the project directory
            const relevantFiles = await this.collectRelevantFiles(project, projectRepoPath);

            // Let Gemini generate the implementation
            const result = await this.generateImplementations(
                projectRepoPath,
                projectGuidelines,
                relevantFiles
            );

            // Check status consistency
            if (result.decision?.decision === 'skip') {
                if (result.filesWritten.length > 0) {
                    throw new Error(`Skip decision with files written: ${result.filesWritten.join(', ')}`);
                }
                if (result.filesDeleted.length > 0) {
                    throw new Error(`Skip decision with files deleted: ${result.filesDeleted.join(', ')}`);
                }
            } else if (result.decision?.decision === 'create' || result.decision?.decision === 'update') {
                if (result.filesWritten.length === 0) {
                    throw new Error(`${result.decision.decision} decision with no files written`);
                }
            }

            console.log(`ProjectTestSpecsService: Completed processing project (Status: ${result.decision?.decision}, Files written: ${result.filesWritten.length})`);
            return {
                project: project,
                success: true,
                filesWritten: result.filesWritten,
                filesRemoved: result.filesDeleted,
            };
        } catch (error) {
            console.error(`Error processing project ${project.name}:`, error);
            return {
                project: project,
                success: false,
                error: error instanceof Error ? error.message : String(error),
            };
        }
    }

    /**
     * Collect relevant files from the project directory
     * @param project The project info
     * @param projectRepoPath Path to the project repository
     * @returns Object containing file contents
     */
    private async collectRelevantFiles(project: Project, projectRepoPath: string): Promise<Record<string, string>> {
        const relevantFiles: Record<string, string> = {};

        try {
            // Add project guidelines
            const guidelinePaths = project.aiGuidelines?.split(',') ?? [
                'INFO.md', 'README.md', 'GUIDELINES.md', 'ARCHITECTURE.md', 'IMPLEMENTATION.md'
            ];
            guidelinePaths
                .map(g => g.trim())
                .forEach(fileName => {
                    const filePath = path.join(projectRepoPath, fileName);
                    if (fs.existsSync(filePath)) {
                        relevantFiles[fileName] = fs.readFileSync(filePath, 'utf-8');
                    }
                });

            console.log(`ProjectTestSpecsService: Collected ${Object.keys(relevantFiles).length} relevant files for ${project.name}`);
        } catch (error) {
            console.error(`Error collecting relevant files for ${project.name}:`, error);
        }

        return relevantFiles;
    }

    /**
     * Generate implementation using Gemini API
     * @param projectRepoPath Path to the project repository
     * @param guidelines Project guidelines
     * @param relevantFiles Additional relevant files to include in the prompt
     * @returns Object containing the generated text, parsed decision, and files written/deleted
     */
    private async generateImplementations(
        projectRepoPath: string,
        guidelines: string,
        relevantFiles: Record<string, string> = {}
    ): Promise<{
        text: string;
        decision?: { decision: TestSpecImplementationStatus; reason: string };
        filesWritten: string[];
        filesDeleted: string[];
    }> {
        const currentDate = new Date().toISOString();

        // If dry run is enabled, return a mock implementation
        if (DRY_RUN_SKIP_GEMINI) {
            const mockText = `# Generated by test-spec-to-test-implementation on ${currentDate} (DRY RUN)`;
            return {
                text: mockText,
                decision: {
                    decision: 'create',
                    reason: 'This is a mock decision for dry run mode'
                },
                filesWritten: [],
                filesDeleted: []
            };
        }

        // Import required configuration
        const {GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_LOCATION, GEMINI_MODEL} = require('../config');

        // Initialize the GeminiFileSystemService directly
        const geminiFileSystemService = new GeminiFileSystemService(
            GOOGLE_CLOUD_PROJECT_ID,
            GOOGLE_CLOUD_LOCATION,
            GEMINI_MODEL,
            DRY_RUN_SKIP_GEMINI
        );

        const additionalContent = relevantFiles ?? {};
        let contentString = "";
        for (const [filename, content] of Object.entries(additionalContent)) {
            contentString += `\n--- ${filename} ---\n${content}\n`;
        }

        // Process the model stream
        const result = await geminiFileSystemService.processModelStream(
            guidelines,
            contentString,
            projectRepoPath
        );

        return {
            text: result.text,
            decision: result.decision as { decision: TestSpecImplementationStatus; reason: string },
            filesWritten: result.filesWritten,
            filesDeleted: result.filesDeleted
        };
    }
}
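For clarity, a sketch of the prompt context assembled by generateImplementations from the collected guideline files; the file names and contents below are illustrative only:

// collectRelevantFiles honours a comma-separated "AI guidelines" override from INFO.md,
// otherwise it falls back to INFO.md, README.md, GUIDELINES.md, ARCHITECTURE.md, IMPLEMENTATION.md.
const relevantFiles: Record<string, string> = {
    'README.md': '# nitro-back\nQuarkus backend...',
    'GUIDELINES.md': 'Project test guidelines...'
};

// generateImplementations flattens them into a single string passed to the Gemini file-system service:
//
// --- README.md ---
// # nitro-back
// Quarkus backend...
//
// --- GUIDELINES.md ---
// Project test guidelines...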
64
src/functions/test-spec-to-test-implementation/src/types.ts
Normal file
@ -0,0 +1,64 @@
/**
 * Type definitions for the test-spec-to-test-implementation function
 */

import {Project} from "shared-functions";

/**
 * Status of a test spec implementation
 */
export type TestSpecImplementationStatus = 'create' | 'update' | 'skip';

/**
 * Test specification from a feature file
 */
export interface TestSpec {
    name: string;
    path: string;
    featureName: string;
    content: string;
    implementationPath?: string;
}

export interface RepoCredentials {
    type: 'username-password' | 'token';
    username?: string;
    password?: string;
    token?: string;
}

export interface ProcessResult {
    project: Project;
    success?: boolean;
    pullRequestUrl?: string;
    error?: string;
    gitPatch?: string;
    filesWritten?: string[];
    filesRemoved?: string[];
}

/**
 * HTTP response format for the API
 */
export interface HttpResponse {
    success: boolean;
    projectsProcessed: number;
    projectsSucceeded: number;
    projectsFailed: number;
    mainPullRequestUrl?: string;
    projects: ProjectSummary[];
    error?: string;
}

/**
 * Summary of a project's processing results
 */
export interface ProjectSummary {
    name: string;
    success: boolean;
    error?: string;
    filesWritten: number;
    filesRemoved: number;
    pullRequestUrl?: string;
}
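A small sketch of the two RepoCredentials variants accepted above; the values are placeholders:

// Token-based access (e.g. a personal access token)
const tokenCredentials: RepoCredentials = {
    type: 'token',
    token: 'personal-access-token'
};

// Username/password access
const basicCredentials: RepoCredentials = {
    type: 'username-password',
    username: 'ci-bot',
    password: 'secret'
};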
18
src/functions/test-spec-to-test-implementation/tsconfig.json
Normal file
@ -0,0 +1,18 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "CommonJS",
    "outDir": "dist",
    "strict": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "skipLibCheck": true
  },
  "include": [
    "src/**/*"
  ],
  "exclude": [
    "node_modules",
    "dist"
  ]
}
@ -3,7 +3,7 @@
Nitro backend server in quarkus

- [x] Repo host: https://gitea.fteamdev.valuya.be/
- [x] Repo url: https://gitea.fteamdev.valuya.be/fiscalteam/nitro-back.git
- [x] Repo url: https://gitea.fteamdev.valuya.be/cghislai/nitro-back.git
- [x] Target branch: main
- [ ] AI guidelines:
- [x] Jira component: nitro
@ -3,7 +3,7 @@
Nitro backend server in quarkus

- [x] Repo host: https://gitea.fteamdev.valuya.be/
- [x] Repo url: https://gitea.fteamdev.valuya.be/fiscalteam/nitro-back.git
- [x] Repo url: https://gitea.fteamdev.valuya.be/cghislai/nitro-back.git
- [x] Target branch: main
- [ ] AI guidelines:
- [x] Jira component: nitro