WIP
parent 0aed81875f
commit ce388e07e4
@@ -2,12 +2,16 @@ import * as fs from 'fs';
 import * as path from 'path';
 import { ProcessorService } from '../processor-service';
 import { ProjectService } from '../project-service';
-import { RepositoryService } from '../repository-service';
 import { Project, Workitem, ProcessResult } from '../../types';
+import {
+    RepositoryService as SharedRepositoryService,
+    PullRequestService as SharedPullRequestService,
+    GeminiService
+} from 'shared-functions';
 
 // Mock dependencies
 jest.mock('../project-service');
-jest.mock('../repository-service');
+jest.mock('shared-functions');
 jest.mock('../../config', () => ({
     validateConfig: jest.fn(),
     getMainRepoCredentials: jest.fn().mockReturnValue({ type: 'token', token: 'mock-token' }),
@@ -18,19 +22,24 @@ jest.mock('../../config', () => ({
     GOOGLE_CLOUD_LOCATION: 'mock-location',
     GEMINI_MODEL: 'mock-model',
     USE_LOCAL_REPO: false,
-    DRY_RUN_SKIP_COMMITS: false
+    DRY_RUN_SKIP_COMMITS: false,
+    DRY_RUN_SKIP_GEMINI: false
 }));
 
 describe('ProcessorService', () => {
     let processorService: ProcessorService;
     let mockProjectService: jest.Mocked<ProjectService>;
-    let mockRepositoryService: jest.Mocked<RepositoryService>;
+    let mockSharedRepositoryService: jest.Mocked<SharedRepositoryService>;
+    let mockSharedPullRequestService: jest.Mocked<SharedPullRequestService>;
+    let mockGeminiService: jest.Mocked<GeminiService>;
 
     beforeEach(() => {
         jest.clearAllMocks();
         processorService = new ProcessorService();
         mockProjectService = ProjectService.prototype as jest.Mocked<ProjectService>;
-        mockRepositoryService = RepositoryService.prototype as jest.Mocked<RepositoryService>;
+        mockSharedRepositoryService = SharedRepositoryService.prototype as jest.Mocked<SharedRepositoryService>;
+        mockSharedPullRequestService = SharedPullRequestService.prototype as jest.Mocked<SharedPullRequestService>;
+        mockGeminiService = GeminiService.prototype as jest.Mocked<GeminiService>;
     });
 
     describe('updateWorkitemFilesWithPullRequestUrls', () => {
@@ -83,7 +92,7 @@ describe('ProcessorService', () => {
             await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
 
             // Verify the method calls
-            expect(mockRepositoryService.createBranch).toHaveBeenCalledWith(
+            expect(mockSharedRepositoryService.createBranch).toHaveBeenCalledWith(
                 mainRepoPath,
                 expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/)
             );
@@ -98,12 +107,12 @@ describe('ProcessorService', () => {
                 'https://github.com/org/test-project/pull/123'
             );
 
-            expect(mockRepositoryService.commitChanges).toHaveBeenCalledWith(
+            expect(mockSharedRepositoryService.commitChanges).toHaveBeenCalledWith(
                 mainRepoPath,
                 expect.stringMatching(/Update workitem files with pull request URLs: \d{4}-\d{2}-\d{2}/)
             );
 
-            expect(mockRepositoryService.pushChanges).toHaveBeenCalledWith(
+            expect(mockSharedRepositoryService.pushChanges).toHaveBeenCalledWith(
                 mainRepoPath,
                 expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/),
                 expect.anything()
@@ -158,7 +167,7 @@ describe('ProcessorService', () => {
             await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
 
             // Verify the method calls
-            expect(mockRepositoryService.createBranch).toHaveBeenCalledWith(
+            expect(mockSharedRepositoryService.createBranch).toHaveBeenCalledWith(
                 mainRepoPath,
                 expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/)
             );
@@ -174,12 +183,12 @@ describe('ProcessorService', () => {
                 'https://github.com/org/test-project/pull/123'
            );
 
-            expect(mockRepositoryService.commitChanges).toHaveBeenCalledWith(
+            expect(mockSharedRepositoryService.commitChanges).toHaveBeenCalledWith(
                 mainRepoPath,
                 expect.stringMatching(/Update workitem files with pull request URLs: \d{4}-\d{2}-\d{2}/)
             );
 
-            expect(mockRepositoryService.pushChanges).toHaveBeenCalledWith(
+            expect(mockSharedRepositoryService.pushChanges).toHaveBeenCalledWith(
                 mainRepoPath,
                 expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/),
                 expect.anything()
@@ -208,10 +217,10 @@ describe('ProcessorService', () => {
             await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
 
             // Verify the method calls
-            expect(mockRepositoryService.createBranch).toHaveBeenCalled();
+            expect(mockSharedRepositoryService.createBranch).toHaveBeenCalled();
             expect(mockProjectService.updateWorkitemWithPullRequestUrl).not.toHaveBeenCalled();
-            expect(mockRepositoryService.commitChanges).not.toHaveBeenCalled();
-            expect(mockRepositoryService.pushChanges).not.toHaveBeenCalled();
+            expect(mockSharedRepositoryService.commitChanges).not.toHaveBeenCalled();
+            expect(mockSharedRepositoryService.pushChanges).not.toHaveBeenCalled();
         });
 
         it('should handle errors when updating workitem files', async () => {
@@ -251,10 +260,10 @@ describe('ProcessorService', () => {
             await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
 
             // Verify the method calls
-            expect(mockRepositoryService.createBranch).toHaveBeenCalled();
+            expect(mockSharedRepositoryService.createBranch).toHaveBeenCalled();
             expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalled();
-            expect(mockRepositoryService.commitChanges).not.toHaveBeenCalled();
-            expect(mockRepositoryService.pushChanges).not.toHaveBeenCalled();
+            expect(mockSharedRepositoryService.commitChanges).not.toHaveBeenCalled();
+            expect(mockSharedRepositoryService.pushChanges).not.toHaveBeenCalled();
         });
     });
 });
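
The suite above swaps the per-function service mocks for the shared-functions ones. Below is a sketch of an additional case that would sit inside the same describe block and reuse the suite's mocks; the results fixture is illustrative, only the method names and the branch-name pattern come from the hunks above.

// Sketch: reuses processorService and the mocked prototypes declared in beforeEach.
it('creates a dated branch before touching workitem files (sketch)', async () => {
    (mockSharedRepositoryService.createBranch as jest.Mock).mockResolvedValue(undefined);

    // Illustrative fixture, not taken from the commit.
    const results = [{
        project: { name: 'test-project' },
        processedWorkitems: [],
        pullRequestUrl: 'https://github.com/org/test-project/pull/123'
    }];
    const mainRepoPath = '/tmp/main-repo';

    await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results as any, mainRepoPath);

    expect(mockSharedRepositoryService.createBranch).toHaveBeenCalledWith(
        mainRepoPath,
        expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/)
    );
});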
@@ -1,6 +1,7 @@
 import * as fs from 'fs';
 import * as path from 'path';
 import { ProjectService } from '../project-service';
+import { WorkitemImplementationStatus } from '../../types';
 
 // Mock fs and path modules
 jest.mock('fs');
@@ -52,8 +53,9 @@ Some existing log content.
                 isActive: true
             };
 
-            const status = 'created';
-            const files = ['file1.ts', 'file2.ts'];
+            const status: WorkitemImplementationStatus = 'create';
+            const filesWritten = ['file1.ts', 'file2.ts'];
+            const filesRemoved: string[] = [];
 
             // Mock fs.existsSync to return true for workitem file
             (fs.existsSync as jest.Mock).mockReturnValue(true);
@@ -67,7 +69,7 @@ Some existing log content.
                 actualContent = content;
             });
 
-            await projectService.updateWorkitemWithImplementationLog(workitem, status, files);
+            await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
 
             // Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
             expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
@@ -94,9 +96,9 @@ This is a description of the workitem.
 
 ### Log
 
-${mockTimestamp} - Workitem has been implemented. Created files:
-- file1.ts
-- file2.ts
+${mockTimestamp} - Workitem has been implemented.
+- Created file1.ts
+- Created file2.ts
 
 
 Some existing log content.
@@ -125,8 +127,9 @@ This is a description of the workitem.
                 isActive: true
             };
 
-            const status = 'updated';
-            const files = ['file1.ts', 'file2.ts'];
+            const status: WorkitemImplementationStatus = 'update';
+            const filesWritten = ['file1.ts', 'file2.ts'];
+            const filesRemoved: string[] = [];
 
             // Mock fs.existsSync to return true for workitem file
             (fs.existsSync as jest.Mock).mockReturnValue(true);
@@ -140,7 +143,7 @@ This is a description of the workitem.
                 actualContent = content;
             });
 
-            await projectService.updateWorkitemWithImplementationLog(workitem, status, files);
+            await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
 
             // Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
             expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
@@ -166,12 +169,11 @@ This is a description of the workitem.
 - [x] Active
 
 
-
 ### Log
 
-${mockTimestamp} - Workitem has been updated. Modified files:
-- file1.ts
-- file2.ts
+${mockTimestamp} - Workitem has been updated.
+- Created file1.ts
+- Created file2.ts
 `;
             expect(actualContentFromMock).toEqual(expectedContent);
         });
@@ -200,8 +202,9 @@ Some existing log content.
                 isActive: true
             };
 
-            const status = 'deleted';
-            const files = ['file1.ts', 'file2.ts'];
+            const status: WorkitemImplementationStatus = 'delete';
+            const filesWritten: string[] = [];
+            const filesRemoved = ['file1.ts', 'file2.ts'];
 
             // Mock fs.existsSync to return true for workitem file
             (fs.existsSync as jest.Mock).mockReturnValue(true);
@@ -215,7 +218,7 @@ Some existing log content.
                 actualContent = content;
             });
 
-            await projectService.updateWorkitemWithImplementationLog(workitem, status, files);
+            await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
 
             // Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
             expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
@@ -242,9 +245,9 @@ This is a description of the workitem.
 
 ### Log
 
-${mockTimestamp} - Workitem has been deleted. Removed files:
-- file1.ts
-- file2.ts
+${mockTimestamp} - Workitem has been deleted.
+- Removed file1.ts
+- Removed file2.ts
 
 
 Some existing log content.
@@ -276,8 +279,9 @@ Some existing log content.
                 isActive: true
             };
 
-            const status = 'created';
-            const files: string[] = [];
+            const status: WorkitemImplementationStatus = 'create';
+            const filesWritten: string[] = [];
+            const filesRemoved: string[] = [];
 
             // Mock fs.existsSync to return true for workitem file
             (fs.existsSync as jest.Mock).mockReturnValue(true);
@@ -291,7 +295,7 @@ Some existing log content.
                 actualContent = content;
             });
 
-            await projectService.updateWorkitemWithImplementationLog(workitem, status, files);
+            await projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved);
 
             // Verify that fs.existsSync and fs.readFileSync were called with the expected arguments
             expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
@@ -318,8 +322,7 @@ This is a description of the workitem.
 
 ### Log
 
-${mockTimestamp} - Workitem has been implemented. Created files:
-No files were affected.
+${mockTimestamp} - Workitem has been implemented.
 
 
 Some existing log content.
@@ -338,13 +341,14 @@ Some existing log content.
                 isActive: true
             };
 
-            const status = 'created';
-            const files = ['file1.ts', 'file2.ts'];
+            const status: WorkitemImplementationStatus = 'create';
+            const filesWritten = ['file1.ts', 'file2.ts'];
+            const filesRemoved: string[] = [];
 
             // Mock fs.existsSync to return false for workitem file
             (fs.existsSync as jest.Mock).mockReturnValue(false);
 
-            await expect(projectService.updateWorkitemWithImplementationLog(workitem, status, files))
+            await expect(projectService.updateWorkitemWithImplementationLog(workitem, status, filesWritten, filesRemoved))
                 .rejects.toThrow('Workitem file not found: path/to/workitem.md');
 
             expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
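
These tests pin down the new four-argument signature and the 'create' | 'update' | 'delete' status values. A minimal usage sketch under the same assumptions follows; the import paths and the wrapper function are illustrative.

import {ProjectService} from './services/project-service';   // path assumed
import {WorkitemImplementationStatus} from './types';          // type added by this commit
import {Workitem} from 'shared-functions';

// Sketch: log an update that wrote one generated spec file and removed nothing.
async function logImplementation(projectService: ProjectService, workitem: Workitem): Promise<void> {
    const status: WorkitemImplementationStatus = 'update';
    await projectService.updateWorkitemWithImplementationLog(
        workitem,
        status,
        ['src/generated/example.spec.ts'],  // filesWritten, logged as "- Created <file>"
        []                                  // filesRemoved, logged as "- Removed <file>"
    );
}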
@@ -2,11 +2,15 @@
  * Service for orchestrating the entire process
  */
 import * as path from 'path';
+import * as os from 'os';
 import {ProcessResult, Project, RepoCredentials} from '../types';
-import {RepositoryService} from './repository-service';
+import {
+    RepositoryService as SharedRepositoryService,
+    PullRequestService as SharedPullRequestService,
+    GeminiService
+} from 'shared-functions';
 import {ProjectService} from './project-service';
-import {PullRequestService} from './pull-request-service';
-import {GeminiProjectProcessor} from './gemini-project-processor';
+import {ProjectWorkitemsService} from './project-workitems-service';
 import {
     DRY_RUN_SKIP_COMMITS,
     getGiteaCredentials,
@@ -14,13 +18,18 @@ import {
     getMainRepoCredentials,
     MAIN_REPO_URL,
     USE_LOCAL_REPO,
-    validateConfig
+    validateConfig,
+    GOOGLE_CLOUD_PROJECT_ID,
+    GOOGLE_CLOUD_LOCATION,
+    GEMINI_MODEL,
+    DRY_RUN_SKIP_GEMINI
 } from '../config';
 
 export class ProcessorService {
-    private repositoryService: RepositoryService;
+    private sharedRepositoryService: SharedRepositoryService;
     private projectService: ProjectService;
-    private pullRequestService: PullRequestService;
+    private sharedPullRequestService: SharedPullRequestService;
+    private geminiService: GeminiService;
     private mainRepoUrl: string;
     private mainRepoCredentials: RepoCredentials;
     private giteaCredentials?: RepoCredentials;
@@ -31,9 +40,16 @@ export class ProcessorService {
         validateConfig();
 
         // Initialize services
-        this.repositoryService = new RepositoryService();
+        const repoBaseDir = path.join(os.tmpdir(), 'prompts-to-test-spec');
+        this.sharedRepositoryService = new SharedRepositoryService(repoBaseDir);
         this.projectService = new ProjectService();
-        this.pullRequestService = new PullRequestService();
+        this.sharedPullRequestService = new SharedPullRequestService();
+        this.geminiService = new GeminiService(
+            GOOGLE_CLOUD_PROJECT_ID,
+            GOOGLE_CLOUD_LOCATION,
+            GEMINI_MODEL,
+            DRY_RUN_SKIP_GEMINI
+        );
 
         // Get main repository URL and credentials only if not using local repo
         if (!USE_LOCAL_REPO) {
@@ -98,7 +114,7 @@ export class ProcessorService {
             console.log(`Resolved local repository path: ${mainRepoPath}`);
         } else {
             console.log(`Cloning main repository: ${this.mainRepoUrl}`);
-            mainRepoPath = await this.repositoryService.cloneMainRepository(
+            mainRepoPath = await this.sharedRepositoryService.cloneMainRepository(
                 this.mainRepoUrl,
                 this.mainRepoCredentials
             );
@@ -165,7 +181,7 @@ export class ProcessorService {
 
         // Create a new branch for the changes
         const branchName = `update-workitem-pr-urls-${new Date().toISOString().split('T')[0]}`;
-        await this.repositoryService.createBranch(mainRepoPath, branchName);
+        await this.sharedRepositoryService.createBranch(mainRepoPath, branchName);
 
         // Update each workitem file with its pull request URL
         for (const result of results) {
@@ -193,13 +209,13 @@ export class ProcessorService {
         // Commit and push changes if any workitems were updated
         if (updatedAnyWorkitem) {
             console.log('Committing changes to workitem files...');
-            await this.repositoryService.commitChanges(
+            await this.sharedRepositoryService.commitChanges(
                 mainRepoPath,
                 `Update workitem files with pull request URLs: ${new Date().toISOString().split('T')[0]}`
             );
 
             console.log('Pushing changes to main repository...');
-            await this.repositoryService.pushChanges(mainRepoPath, branchName, this.mainRepoCredentials);
+            await this.sharedRepositoryService.pushChanges(mainRepoPath, branchName, this.mainRepoCredentials);
             console.log('Successfully updated workitem files with pull request URLs');
         } else {
             console.log('No workitem files were updated');
@@ -230,14 +246,14 @@ export class ProcessorService {
 
         // Clone the project repository
         console.log(`Cloning project repository: ${project.repoUrl}`);
-        const projectRepoPath = await this.repositoryService.cloneProjectRepository(project, credentials);
+        const projectRepoPath = await this.sharedRepositoryService.cloneProjectRepository(project, credentials);
 
-        // Create a GeminiProjectProcessor to handle the project
-        const geminiProjectProcessor = new GeminiProjectProcessor();
+        // Create a ProjectWorkitemsService to handle the project
+        const projectWorkitemsService = new ProjectWorkitemsService();
 
-        // Let Gemini operate within the project
-        console.log(`Letting Gemini operate within project: ${project.name}`);
-        const result = await geminiProjectProcessor.processProject(project, projectRepoPath);
+        // Process workitems within the project
+        console.log(`Processing workitems within project: ${project.name}`);
+        const result = await projectWorkitemsService.processProject(project, projectRepoPath);
 
         // If no workitems were processed or there was an error, return early
         if (result.processedWorkitems.length === 0 || result.error) {
@@ -256,24 +272,33 @@ export class ProcessorService {
 
         // Create a new branch for changes
         const branchName = `update-workitems-${new Date().toISOString().split('T')[0]}`;
-        await this.repositoryService.createBranch(projectRepoPath, branchName);
+        await this.sharedRepositoryService.createBranch(projectRepoPath, branchName);
 
         // Commit changes
-        await this.repositoryService.commitChanges(
+        await this.sharedRepositoryService.commitChanges(
             projectRepoPath,
             `Update workitems: ${new Date().toISOString().split('T')[0]}`
         );
 
         // Push changes
-        await this.repositoryService.pushChanges(projectRepoPath, branchName, credentials);
+        await this.sharedRepositoryService.pushChanges(projectRepoPath, branchName, credentials);
 
+        // Generate PR description using Gemini
+        const description = await this.geminiService.generatePullRequestDescription(
+            result.processedWorkitems,
+            result.gitPatch
+        );
+
+        // Generate PR title
+        const title = `Update workitems: ${new Date().toISOString().split('T')[0]}`;
+
         // Create pull request
-        const pullRequestUrl = await this.pullRequestService.createPullRequest(
+        const pullRequestUrl = await this.sharedPullRequestService.createPullRequest(
             project,
             branchName,
-            result.processedWorkitems,
             credentials,
-            result.gitPatch
+            title,
+            description
        );
 
         console.log(`Created pull request: ${pullRequestUrl}`);
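
Pieced together from the hunks above, the per-project flow now builds the pull-request title and description at the call site instead of delegating to the local PullRequestService wrapper. Below is a condensed sketch of that flow as a free function; in the commit this logic lives inside ProcessorService, and the helper signature and import paths are illustrative.

import {
    RepositoryService as SharedRepositoryService,
    PullRequestService as SharedPullRequestService,
    GeminiService
} from 'shared-functions';
import {ProjectWorkitemsService} from './services/project-workitems-service';  // path assumed
import {Project, RepoCredentials} from './types';                              // path assumed

// Condensed sketch of the refactored per-project flow; error handling omitted.
async function processProjectSketch(
    sharedRepositoryService: SharedRepositoryService,
    sharedPullRequestService: SharedPullRequestService,
    geminiService: GeminiService,
    project: Project,
    credentials: RepoCredentials
): Promise<string> {
    const projectRepoPath = await sharedRepositoryService.cloneProjectRepository(project, credentials);

    const projectWorkitemsService = new ProjectWorkitemsService();
    const result = await projectWorkitemsService.processProject(project, projectRepoPath);

    const dateSuffix = new Date().toISOString().split('T')[0];
    const branchName = `update-workitems-${dateSuffix}`;
    await sharedRepositoryService.createBranch(projectRepoPath, branchName);
    await sharedRepositoryService.commitChanges(projectRepoPath, `Update workitems: ${dateSuffix}`);
    await sharedRepositoryService.pushChanges(projectRepoPath, branchName, credentials);

    // The old local PullRequestService wrapper is gone: title and description are built here
    // and passed straight to the shared createPullRequest implementation.
    const description = await geminiService.generatePullRequestDescription(result.processedWorkitems, result.gitPatch);
    const title = `Update workitems: ${dateSuffix}`;
    return sharedPullRequestService.createPullRequest(project, branchName, credentials, title, description);
}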
@@ -4,6 +4,7 @@
 import * as fs from 'fs';
 import * as path from 'path';
 import {ProjectService as SharedProjectService, Project, Workitem} from 'shared-functions';
+import { WorkitemImplementationStatus } from '../types';
 
 export class ProjectService {
     private sharedProjectService: SharedProjectService;
@@ -176,22 +177,23 @@ export class ProjectService {
     /**
      * Update workitem file with implementation log
      * @param workitem Workitem to update
-     * @param status Status of the workitem (created, updated, deleted)
-     * @param files Array of files that were created, updated, or deleted
+     * @param status Status of the workitem implementation (create, update, delete)
+     * @param filesWritten Array of files that were created or updated
+     * @param filesRemoved Array of files that were removed
      * @returns Updated workitem
      */
     async updateWorkitemWithImplementationLog(
         workitem: Workitem,
-        status: 'create' | 'update' | 'delete',
-        filesWritten: string[],
-        filesRemoved: string[],
+        status: WorkitemImplementationStatus,
+        filesWritten: string[] = [],
+        filesRemoved: string[] = [],
     ): Promise<Workitem> {
         if (!fs.existsSync(workitem.path)) {
             throw new Error(`Workitem file not found: ${workitem.path}`);
         }
 
         // Read the current content
-        let content = fs.readFileSync(workitem.path, 'utf-8');
+        const content = fs.readFileSync(workitem.path, 'utf-8');
         const lines = content.split('\n');
 
         // Format the log message
@@ -211,11 +213,16 @@ export class ProjectService {
         }
 
         // Add the list of files
-        for (const file of filesWritten) {
-            logMessage += `- Created ${file}\n`;
+        if (filesWritten.length > 0) {
+            for (const file of filesWritten) {
+                logMessage += `- Created ${file}\n`;
+            }
         }
-        for (const file of filesRemoved) {
-            logMessage += `- Removed ${file}\n`;
+
+        if (filesRemoved.length > 0) {
+            for (const file of filesRemoved) {
+                logMessage += `- Removed ${file}\n`;
+            }
         }
 
         // Add PR URL if available
@@ -236,9 +243,6 @@ export class ProjectService {
             nextSectionIndex = lines.length;
         }
 
-        // Get the existing log content
-        const existingLogContent = lines.slice(logSectionIndex + 1, nextSectionIndex).join('\n');
-
         // Insert the new log message after the "### Log" line and before any existing content
         const beforeLog = lines.slice(0, logSectionIndex + 1);
         const afterLog = lines.slice(nextSectionIndex);
@@ -252,8 +256,10 @@ export class ProjectService {
             fs.writeFileSync(workitem.path, updatedContent, 'utf-8');
         } else {
             // If no Log section is found, append it to the end of the file
-            console.log(`No "### Log" section found in workitem ${workitem.name}, appending to the end`);
-            lines.push('\n### Log');
+            if (lines[lines.length - 1] !== '') {
+                lines.push(''); // Add a blank line before the log section if needed
+            }
+            lines.push('### Log');
             lines.push(''); // Add a blank line after the log title
             lines.push(logMessage);
 
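
With the defaulted filesWritten/filesRemoved parameters, callers that have nothing to report can omit the lists entirely. A small sketch, assuming an existing ProjectService instance and a workitem whose file exists on disk:

import {ProjectService} from './services/project-service';  // path assumed
import {Workitem} from 'shared-functions';

async function exampleCalls(projectService: ProjectService, workitem: Workitem): Promise<void> {
    // Nothing to report: both lists fall back to [] and only the status line is logged.
    await projectService.updateWorkitemWithImplementationLog(workitem, 'delete');

    // Removal with an explicit list: each entry is logged as "- Removed <file>".
    await projectService.updateWorkitemWithImplementationLog(workitem, 'delete', [], ['src/old.spec.ts']);
}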
@@ -1,36 +1,42 @@
 /**
- * Service for handling Gemini operations within a project
+ * Service for handling workitem operations within a project
  */
 import * as fs from 'fs';
 import * as path from 'path';
-import {ProcessResult, Project, Workitem} from '../types';
+import {ProcessResult} from '../types';
 import {ProjectService} from './project-service';
-import {RepositoryService} from './repository-service';
 import {DRY_RUN_SKIP_GEMINI} from '../config';
-import {GeminiFileSystemService} from 'shared-functions';
+import {
+    GeminiFileSystemService,
+    Project,
+    Workitem,
+    RepositoryService as SharedRepositoryService
+} from 'shared-functions';
 
-export class GeminiProjectProcessor {
+export class ProjectWorkitemsService {
     private projectService: ProjectService;
-    private repositoryService: RepositoryService;
+    private sharedRepositoryService: SharedRepositoryService;
 
     constructor() {
         this.projectService = new ProjectService();
-        this.repositoryService = new RepositoryService();
+        this.sharedRepositoryService = new SharedRepositoryService(
+            path.join(require('os').tmpdir(), 'prompts-to-test-spec')
+        );
     }
 
     /**
-     * Process the project using Gemini
+     * Process the project workitems
      * @param project Project to process
     * @param projectRepoPath Path to the project repository
     * @returns Process result
     */
    async processProject(project: Project, projectRepoPath: string): Promise<ProcessResult> {
-        console.log(`GeminiProjectProcessor: Processing project ${project.name}`);
+        console.log(`ProjectWorkitemsService: Processing project ${project.name}`);
 
        try {
            // Find all workitems in the project
            const workitems = await this.projectService.findWorkitems(project.path);
-            console.log(`GeminiProjectProcessor: Found ${workitems.length} workitems in project ${project.name}`);
+            console.log(`ProjectWorkitemsService: Found ${workitems.length} workitems in project ${project.name}`);
 
            // Skip if no workitems found
            if (workitems.length === 0) {
@@ -46,7 +52,7 @@ export class GeminiProjectProcessor {
            // Process each workitem
            const processedWorkitems = [];
            for (const workitem of workitems) {
-                console.log(`GeminiProjectProcessor: Processing workitem: ${workitem.name}`);
+                console.log(`ProjectWorkitemsService: Processing workitem: ${workitem.name}`);
                const result = await this.processWorkitem(project, projectRepoPath, workitem, projectGuidelines);
                processedWorkitems.push({workitem, ...result});
            }
@@ -58,7 +64,7 @@ export class GeminiProjectProcessor {
            if (totalFilesWritten > 0) {
                try {
                    console.log(`Generating git patch for project ${project.name} with ${totalFilesWritten} files written`);
-                    gitPatch = await this.repositoryService.generateGitPatch(projectRepoPath);
+                    gitPatch = await this.sharedRepositoryService.generateGitPatch(projectRepoPath);
                } catch (error) {
                    console.error(`Error generating git patch for project ${project.name}:`, error);
                }
@@ -101,7 +107,7 @@ export class GeminiProjectProcessor {
    }> {
        try {
            // Set the current workitem
-            console.log(`GeminiProjectProcessor: Processing workitem: ${workitem.name} (Active: ${workitem.isActive})`);
+            console.log(`ProjectWorkitemsService: Processing workitem: ${workitem.name} (Active: ${workitem.isActive})`);
 
            // Read workitem content
            const workitemContent = fs.readFileSync(workitem.path, 'utf-8');
@@ -161,13 +167,13 @@ export class GeminiProjectProcessor {
                    result.filesDeleted
                );
 
-                console.log(`GeminiProjectProcessor: Updated workitem file with implementation log for ${workitem.name}`);
+                console.log(`ProjectWorkitemsService: Updated workitem file with implementation log for ${workitem.name}`);
            } catch (error) {
                console.error(`Error updating workitem file with implementation log: ${error}`);
            }
        }
 
-        console.log(`GeminiProjectProcessor: Completed processing workitem: ${workitem.name} (Status: ${decision}, Files written: ${result.filesWritten.length})`);
+        console.log(`ProjectWorkitemsService: Completed processing workitem: ${workitem.name} (Status: ${decision}, Files written: ${result.filesWritten.length})`);
        return {
            success: true,
            decision,
@@ -205,7 +211,7 @@ export class GeminiProjectProcessor {
                }
            });
 
-            console.log(`GeminiProjectProcessor: Collected ${Object.keys(relevantFiles).length} relevant files for workitem ${workitem.name}`);
+            console.log(`ProjectWorkitemsService: Collected ${Object.keys(relevantFiles).length} relevant files for workitem ${workitem.name}`);
        } catch (error) {
            console.error(`Error collecting relevant files for workitem ${workitem.name}:`, error);
        }
@@ -284,6 +290,4 @@ export class GeminiProjectProcessor {
            filesDeleted: result.filesDeleted
        };
    }
-
-
 }
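
The renamed service now passes its repository base directory to the shared RepositoryService explicitly. The inline require('os') call does the same work as the typed import already used in the ProcessorService hunk above; a minimal sketch of the equivalent wiring with the ES import (an equivalent form, not what this commit ships):

import * as os from 'os';
import * as path from 'path';
import {RepositoryService as SharedRepositoryService} from 'shared-functions';

// Same base directory as the ProcessorService constructor above.
const sharedRepositoryService = new SharedRepositoryService(
    path.join(os.tmpdir(), 'prompts-to-test-spec')
);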
@@ -1,77 +0,0 @@
-/**
- * Service for handling pull request operations
- */
-import {
-    PullRequestService as SharedPullRequestService,
-    Project,
-    RepoCredentials,
-    Workitem,
-    GeminiService
-} from 'shared-functions';
-import {GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_LOCATION, GEMINI_MODEL, DRY_RUN_SKIP_GEMINI} from '../config';
-
-export class PullRequestService {
-    private sharedPullRequestService: SharedPullRequestService;
-    private geminiService: GeminiService;
-
-    constructor() {
-        this.sharedPullRequestService = new SharedPullRequestService();
-        this.geminiService = new GeminiService(
-            GOOGLE_CLOUD_PROJECT_ID,
-            GOOGLE_CLOUD_LOCATION,
-            GEMINI_MODEL,
-            DRY_RUN_SKIP_GEMINI
-        );
-    }
-
-    /**
-     * Create a pull request for changes in a repository
-     * @param project Project information
-     * @param branchName Name of the branch with changes
-     * @param processedWorkitems List of processed workitems
-     * @param credentials Repository credentials
-     * @param gitPatch Optional git patch to include in the description
-     * @returns URL of the created pull request
-     */
-    async createPullRequest(
-        project: Project,
-        branchName: string,
-        processedWorkitems: {
-            workitem: Workitem;
-            success: boolean;
-            error?: string;
-            status?: 'skipped' | 'updated' | 'created';
-            filesWritten?: string[]
-        }[],
-        credentials: RepoCredentials,
-        gitPatch?: string
-    ): Promise<string> {
-        // Generate PR title and description
-        const title = `Update workitems: ${new Date().toISOString().split('T')[0]}`;
-        const description = await this.generatePullRequestDescription(processedWorkitems, gitPatch);
-
-        // Use the shared implementation to create the pull request
-        return this.sharedPullRequestService.createPullRequest(project, branchName, credentials, title, description);
-    }
-
-    /**
-     * Generate a description for the pull request using Gemini
-     * @param processedWorkitems List of processed workitems
-     * @param gitPatch Optional git patch to include in the description
-     * @returns Pull request description
-     */
-    private async generatePullRequestDescription(
-        processedWorkitems: {
-            workitem: Workitem;
-            success: boolean;
-            error?: string;
-            status?: 'skipped' | 'updated' | 'created';
-            filesWritten?: string[]
-        }[],
-        gitPatch?: string
-    ): Promise<string> {
-        // Use Gemini to generate the pull request description, passing the git patch
-        // so Gemini can analyze the code changes
-        return await this.geminiService.generatePullRequestDescription(processedWorkitems, gitPatch);
-    }
-}
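
The deleted wrapper only added title and description generation, which the ProcessorService hunks above now do at the call site. A sketch of the direct replacement call, using the shared API's five-argument order shown in the diff; the helper function itself is illustrative:

import {
    PullRequestService as SharedPullRequestService,
    Project,
    RepoCredentials
} from 'shared-functions';

// Sketch: the old createPullRequest(project, branchName, processedWorkitems, credentials, gitPatch)
// becomes a direct call that takes a precomputed title and description instead.
async function openPullRequest(
    pullRequestService: SharedPullRequestService,
    project: Project,
    branchName: string,
    credentials: RepoCredentials,
    title: string,
    description: string
): Promise<string> {
    return pullRequestService.createPullRequest(project, branchName, credentials, title, description);
}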
@@ -1,82 +0,0 @@
-/**
- * Service for handling repository operations
- */
-import * as path from 'path';
-import * as os from 'os';
-import {RepositoryService as SharedRepositoryService, Project, RepoCredentials} from 'shared-functions';
-
-export class RepositoryService {
-    private sharedRepositoryService: SharedRepositoryService;
-
-    constructor(baseDir?: string) {
-        // Use a different base directory for prompts-to-test-spec
-        const repoBaseDir = baseDir || path.join(os.tmpdir(), 'prompts-to-test-spec');
-        this.sharedRepositoryService = new SharedRepositoryService(repoBaseDir);
-    }
-
-    /**
-     * Clone the main repository containing prompts
-     * @param repoUrl URL of the repository
-     * @param credentials Optional credentials for private repositories
-     * @returns Path to the cloned repository
-     */
-    async cloneMainRepository(repoUrl: string, credentials?: RepoCredentials): Promise<string> {
-        return this.sharedRepositoryService.cloneMainRepository(repoUrl, credentials);
-    }
-
-    /**
-     * Clone a project repository
-     * @param project Project information
-     * @param credentials Optional credentials for private repositories
-     * @returns Path to the cloned repository
-     */
-    async cloneProjectRepository(project: Project, credentials?: RepoCredentials): Promise<string> {
-        return this.sharedRepositoryService.cloneProjectRepository(project, credentials);
-    }
-
-    /**
-     * Create a new branch in a repository
-     * @param repoDir Path to the repository
-     * @param branchName Name of the branch to create
-     */
-    async createBranch(repoDir: string, branchName: string): Promise<void> {
-        return this.sharedRepositoryService.createBranch(repoDir, branchName);
-    }
-
-    /**
-     * Commit changes to a repository
-     * @param repoDir Path to the repository
-     * @param message Commit message
-     */
-    async commitChanges(repoDir: string, message: string): Promise<void> {
-        return this.sharedRepositoryService.commitChanges(repoDir, message);
-    }
-
-    /**
-     * Push changes to a repository
-     * @param repoDir Path to the repository
-     * @param branchName Name of the branch to push
-     * @param credentials Optional credentials for private repositories
-     */
-    async pushChanges(repoDir: string, branchName: string, credentials?: RepoCredentials): Promise<void> {
-        return this.sharedRepositoryService.pushChanges(repoDir, branchName, credentials);
-    }
-
-    /**
-     * Generate a git patch of the changes in a repository
-     * @param repoDir Path to the repository
-     * @returns Git patch as a string
-     */
-    async generateGitPatch(repoDir: string): Promise<string> {
-        return this.sharedRepositoryService.generateGitPatch(repoDir);
-    }
-
-    /**
-     * Checkout an existing branch in a repository
-     * @param repoDir Path to the repository
-     * @param branchName Name of the branch to checkout
-     */
-    async checkoutBranch(repoDir: string, branchName: string): Promise<void> {
-        return this.sharedRepositoryService.checkoutBranch(repoDir, branchName);
-    }
-}
@@ -2,6 +2,11 @@
  * Type definitions for the prompts-to-test-spec function
  */
 
+/**
+ * Status of a workitem implementation
+ */
+export type WorkitemImplementationStatus = 'create' | 'update' | 'delete';
+
 export interface Project {
     name: string;
     path: string;
@@ -29,17 +29,3 @@ export interface RepoCredentials {
     password?: string;
     token?: string;
 }
-
-export interface ProcessResult {
-    project: Project;
-    processedWorkitems: {
-        workitem: Workitem;
-        success: boolean;
-        error?: string;
-        status?: 'skipped' | 'updated' | 'created';
-        filesWritten?: string[];
-    }[];
-    pullRequestUrl?: string;
-    error?: string;
-    gitPatch?: string;
-}
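
The new status union makes exhaustive handling compiler-checked. A small sketch follows; the verb mapping mirrors the log wording asserted in the tests above, and the import path is assumed.

import {WorkitemImplementationStatus} from './types';  // path assumed

// Map each implementation status to the verb used in the workitem log.
function statusVerb(status: WorkitemImplementationStatus): string {
    switch (status) {
        case 'create':
            return 'implemented';
        case 'update':
            return 'updated';
        case 'delete':
            return 'deleted';
        default: {
            // Exhaustiveness check: adding a new status without handling it fails to compile.
            const unreachable: never = status;
            return unreachable;
        }
    }
}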