mirror of
https://github.com/noodlapp/noodl.git
synced 2026-01-11 23:02:53 +01:00
Initial commit
Co-Authored-By: Eric Tuvesson <eric.tuvesson@gmail.com> Co-Authored-By: mikaeltellhed <2311083+mikaeltellhed@users.noreply.github.com> Co-Authored-By: kotte <14197736+mrtamagotchi@users.noreply.github.com> Co-Authored-By: Anders Larsson <64838990+anders-topp@users.noreply.github.com> Co-Authored-By: Johan <4934465+joolsus@users.noreply.github.com> Co-Authored-By: Tore Knudsen <18231882+torekndsn@users.noreply.github.com> Co-Authored-By: victoratndl <99176179+victoratndl@users.noreply.github.com>
This commit is contained in:
14
packages/noodl-git/package.json
Normal file
14
packages/noodl-git/package.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "@noodl/git",
|
||||
"version": "2.7.0",
|
||||
"main": "src/index.ts",
|
||||
"description": "",
|
||||
"author": "Noodl <info@noodl.net>",
|
||||
"homepage": "https://noodl.net",
|
||||
"dependencies": {
|
||||
"desktop-trampoline": "https://github.com/desktop/desktop-trampoline/archive/refs/tags/v0.9.8.tar.gz",
|
||||
"dugite": "^1.106.0",
|
||||
"double-ended-queue": "^2.1.0-0",
|
||||
"split2": "^4.1.0"
|
||||
}
|
||||
}
|
||||
58
packages/noodl-git/src/actions/branch.ts
Normal file
58
packages/noodl-git/src/actions/branch.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { getBranches as gitGetBranches } from "../core/for-each-ref";
|
||||
import { Branch } from "../core/models/branch";
|
||||
import { sortBy, groupBy } from "underscore";
|
||||
import { deleteRef } from "../core/update-ref";
|
||||
import { pushDelete } from "../core/push";
|
||||
import { GitActionError, GitActionErrorCode } from "./git-action-error";
|
||||
|
||||
export async function getBranches(
|
||||
repositoryDir: string
|
||||
): Promise<readonly Branch[]> {
|
||||
const branches = await gitGetBranches(repositoryDir);
|
||||
const groupped = groupBy(branches, (x) => x.type);
|
||||
Object.keys(groupped).forEach((key) => sortBy(groupped[key], (x) => x.name));
|
||||
return Object.keys(groupped).flatMap((key) => groupped[key]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a local branch, this will leave the remote branch intact if there is one.
|
||||
*
|
||||
* @param repositoryDir
|
||||
* @param branch
|
||||
*/
|
||||
export async function deleteLocalBranch(
|
||||
repositoryDir: string,
|
||||
branch: Branch
|
||||
): Promise<void> {
|
||||
await deleteRef(repositoryDir, branch.ref);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a remote branch.
|
||||
*
|
||||
* @param repositoryDir
|
||||
* @param branch
|
||||
*/
|
||||
export async function deleteRemoteBranch(
|
||||
repositoryDir: string,
|
||||
branch: Branch
|
||||
): Promise<void> {
|
||||
if (!branch.upstream) {
|
||||
throw new Error("Branch is not remote.");
|
||||
}
|
||||
|
||||
try {
|
||||
await pushDelete(
|
||||
repositoryDir,
|
||||
branch.upstreamRemoteName,
|
||||
branch.nameWithoutRemote
|
||||
);
|
||||
} catch (error) {
|
||||
const message = error.toString();
|
||||
if (message.includes("remote ref does not exist")) {
|
||||
throw new GitActionError(GitActionErrorCode.BranchNotExisting);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
17
packages/noodl-git/src/actions/fetch.ts
Normal file
17
packages/noodl-git/src/actions/fetch.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { fetch as gitFetch } from '../core/fetch';
|
||||
import { IGitResult } from '../core/git-error';
|
||||
import { IFetchProgress } from '../core/models/progress';
|
||||
import { IRemote } from '../core/models/remote';
|
||||
import { createErrorFromMessage } from './git-action-error';
|
||||
|
||||
export async function fetch(
|
||||
repositoryDir: string,
|
||||
remote: IRemote,
|
||||
progressCallback?: (progress: IFetchProgress) => void
|
||||
): Promise<IGitResult> {
|
||||
try {
|
||||
return await gitFetch(repositoryDir, remote, progressCallback);
|
||||
} catch (error) {
|
||||
throw createErrorFromMessage(error.toString());
|
||||
}
|
||||
}
|
||||
48
packages/noodl-git/src/actions/git-action-error.ts
Normal file
48
packages/noodl-git/src/actions/git-action-error.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
export enum GitActionErrorCode {
|
||||
LocalRepository,
|
||||
InvalidBranchName,
|
||||
BranchNotExisting,
|
||||
StashNoLocalChanges,
|
||||
AuthorizationFailed
|
||||
}
|
||||
|
||||
/**
|
||||
* GitActionError will be thrown from the action functions,
|
||||
* which are designed to be a higher level of interaction
|
||||
* with git than the other functions.
|
||||
*/
|
||||
export class GitActionError extends Error {
|
||||
public constructor(public readonly code: GitActionErrorCode) {
|
||||
super(getMessage(code));
|
||||
}
|
||||
}
|
||||
|
||||
function getMessage(code: GitActionErrorCode): string {
|
||||
switch (code) {
|
||||
case GitActionErrorCode.LocalRepository:
|
||||
return 'Repository is not published.';
|
||||
|
||||
case GitActionErrorCode.InvalidBranchName:
|
||||
return 'Branch name contains invalid characters.';
|
||||
|
||||
case GitActionErrorCode.BranchNotExisting:
|
||||
return 'Branch does not exist.';
|
||||
|
||||
case GitActionErrorCode.StashNoLocalChanges:
|
||||
return 'No local changes to save.';
|
||||
|
||||
case GitActionErrorCode.AuthorizationFailed:
|
||||
return 'Authorization failed.';
|
||||
|
||||
default:
|
||||
return String(code);
|
||||
}
|
||||
}
|
||||
|
||||
export function createErrorFromMessage(message: string) {
|
||||
if (message.includes('Authentication failed')) {
|
||||
return new GitActionError(GitActionErrorCode.AuthorizationFailed);
|
||||
} else {
|
||||
return new Error(message);
|
||||
}
|
||||
}
|
||||
87
packages/noodl-git/src/actions/history.ts
Normal file
87
packages/noodl-git/src/actions/history.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { getCommits } from "../core/logs";
|
||||
import { Branch } from "../core/models/branch";
|
||||
import { Commit } from "../core/models/snapshot";
|
||||
import { getAheadBehind, revSymmetricDifference } from "../core/rev-list";
|
||||
|
||||
export class CommitHistoryEntry extends Commit {
|
||||
public isLocalAhead: boolean;
|
||||
public isRemoteAhead: boolean;
|
||||
|
||||
public constructor(
|
||||
commit: Commit,
|
||||
isLocalAhead: boolean,
|
||||
isRemoteAhead: boolean
|
||||
) {
|
||||
super(
|
||||
commit.repositoryDir,
|
||||
commit.sha,
|
||||
commit.shortSha,
|
||||
commit.summary,
|
||||
commit.body,
|
||||
commit.author,
|
||||
commit.committer,
|
||||
commit.parentSHAs,
|
||||
commit.tags
|
||||
);
|
||||
|
||||
this.isLocalAhead = isLocalAhead;
|
||||
this.isRemoteAhead = isRemoteAhead;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Class designed to handle history between multiple branches.
|
||||
*/
|
||||
export class CommitHistory {
|
||||
public constructor(
|
||||
public readonly repositoryDir: string,
|
||||
public readonly branch: Branch
|
||||
) {}
|
||||
|
||||
public async fetch(count: number): Promise<readonly CommitHistoryEntry[]> {
|
||||
const localGitCommits = await getCommits(
|
||||
this.repositoryDir,
|
||||
undefined,
|
||||
count
|
||||
);
|
||||
|
||||
let commits: CommitHistoryEntry[] = localGitCommits.map(
|
||||
(x) => new CommitHistoryEntry(x, false, false)
|
||||
);
|
||||
|
||||
if (this.branch.remote) {
|
||||
const remoteAheadGitCommits = await getCommits(
|
||||
this.repositoryDir,
|
||||
`${this.branch.nameWithoutRemote}..${this.branch.remote.name}`,
|
||||
count
|
||||
);
|
||||
|
||||
const remoteOnlyCommits = remoteAheadGitCommits.map(
|
||||
(x) => new CommitHistoryEntry(x, false, true)
|
||||
);
|
||||
|
||||
//get commits that aren't pushed
|
||||
const localAheadGitCommits = await getCommits(
|
||||
this.repositoryDir,
|
||||
`${this.branch.remote.name}..${this.branch.nameWithoutRemote}`,
|
||||
count
|
||||
);
|
||||
|
||||
localAheadGitCommits.forEach((aheadCommit) => {
|
||||
const c = commits.find((c) => c.sha === aheadCommit.sha);
|
||||
if (c) {
|
||||
c.isLocalAhead = true;
|
||||
}
|
||||
});
|
||||
|
||||
commits = remoteOnlyCommits.concat(commits);
|
||||
} else {
|
||||
// there is no remote, it's a local branch
|
||||
// flag all the commits as being "ahead" of
|
||||
// the remote (even the commit that was the branching points, and commits before it)
|
||||
commits.forEach((c) => (c.isLocalAhead = true));
|
||||
}
|
||||
|
||||
return commits;
|
||||
}
|
||||
}
|
||||
7
packages/noodl-git/src/actions/index.ts
Normal file
7
packages/noodl-git/src/actions/index.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export * from './branch';
|
||||
export * from './history';
|
||||
export * from './git-action-error';
|
||||
export * from './push';
|
||||
export * from './remote';
|
||||
export * from './pull';
|
||||
export * from './fetch';
|
||||
18
packages/noodl-git/src/actions/pull.ts
Normal file
18
packages/noodl-git/src/actions/pull.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { Branch } from '../core/models/branch';
|
||||
import { IPullProgress } from '../core/models/progress';
|
||||
import { IRemote } from '../core/models/remote';
|
||||
import { pull as gitPull } from '../core/pull';
|
||||
import { createErrorFromMessage, GitActionError, GitActionErrorCode } from './git-action-error';
|
||||
|
||||
export async function pull(
|
||||
repositoryDir: string,
|
||||
remote: IRemote,
|
||||
branch: Branch | string,
|
||||
progressCallback?: (progress: IPullProgress) => void
|
||||
): Promise<void> {
|
||||
try {
|
||||
return await gitPull(repositoryDir, remote, branch, progressCallback);
|
||||
} catch (error) {
|
||||
throw createErrorFromMessage(error.toString());
|
||||
}
|
||||
}
|
||||
37
packages/noodl-git/src/actions/push.ts
Normal file
37
packages/noodl-git/src/actions/push.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { Branch } from '../core/models/branch';
|
||||
import { IPushProgress } from '../core/models/progress';
|
||||
import { push as gitPush } from '../core/push';
|
||||
import { createErrorFromMessage, GitActionError, GitActionErrorCode } from './git-action-error';
|
||||
import { getRemote } from './remote';
|
||||
|
||||
interface PushOptions {
|
||||
baseDir: string;
|
||||
currentBranch: Branch;
|
||||
|
||||
onProgress?: (progress: IPushProgress) => void;
|
||||
}
|
||||
|
||||
export async function push({ baseDir, currentBranch, onProgress }: PushOptions): Promise<boolean> {
|
||||
const remote = await getRemote(baseDir);
|
||||
|
||||
try {
|
||||
return await gitPush(
|
||||
baseDir,
|
||||
remote,
|
||||
currentBranch.nameWithoutRemote,
|
||||
currentBranch.upstreamWithoutRemote,
|
||||
[],
|
||||
undefined,
|
||||
onProgress
|
||||
);
|
||||
} catch (error) {
|
||||
const message = error.toString();
|
||||
if (message.includes('Updates were rejected because the remote contains work that you do')) {
|
||||
throw new Error(
|
||||
'Updates were rejected because there are new changes that you do not have locally. Pull to get the latest changes.'
|
||||
);
|
||||
}
|
||||
|
||||
throw createErrorFromMessage(error.toString());
|
||||
}
|
||||
}
|
||||
30
packages/noodl-git/src/actions/remote.ts
Normal file
30
packages/noodl-git/src/actions/remote.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { IRemote } from '../core/models/remote';
|
||||
import { getRemotes, setRemoteURL as _setRemoteURL } from '../core/remotes';
|
||||
import { GitActionError, GitActionErrorCode } from './git-action-error';
|
||||
|
||||
/**
|
||||
* Returns a single remote.
|
||||
*
|
||||
* @param repositoryDir
|
||||
* @throws {GitHelperError}
|
||||
* @returns A single remote.
|
||||
*/
|
||||
export async function getRemote(repositoryDir: string): Promise<Readonly<IRemote>> {
|
||||
const remotes = await getRemotes(repositoryDir);
|
||||
|
||||
if (remotes.length === 0) {
|
||||
// When there are no remotes, we assume that the repository is local only.
|
||||
// This might not always be the case,
|
||||
// but ideally a remote branch should have been created.
|
||||
throw new GitActionError(GitActionErrorCode.LocalRepository);
|
||||
}
|
||||
|
||||
// TODO: It would be nice if the git client selects a default remote
|
||||
// and then we work from that remote, so you can
|
||||
// technically have many different remotes at once.
|
||||
return remotes[0];
|
||||
}
|
||||
|
||||
export async function setRemoteURL(repositoryDir: string, remoteName: string, url: string): Promise<void> {
|
||||
await _setRemoteURL(repositoryDir, remoteName, url);
|
||||
}
|
||||
3
packages/noodl-git/src/constants.ts
Normal file
3
packages/noodl-git/src/constants.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export const DEFAULT_REMOTE = 'origin';
|
||||
export const DEFAULT_BRANCH = 'main';
|
||||
export const DEFAULT_REF = `${DEFAULT_REMOTE}/${DEFAULT_BRANCH}`;
|
||||
5
packages/noodl-git/src/core/add.ts
Normal file
5
packages/noodl-git/src/core/add.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { git } from './client';
|
||||
|
||||
export async function addAll(basePath: string) {
|
||||
await git(['add', '-A'], basePath, 'addAll');
|
||||
}
|
||||
114
packages/noodl-git/src/core/apply.ts
Normal file
114
packages/noodl-git/src/core/apply.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { GitError as DugiteError } from 'dugite';
|
||||
import { git } from './client';
|
||||
import { WorkingDirectoryFileChange, FileStatusKind } from './models/status';
|
||||
import { DiffType, ITextDiff, DiffSelection } from './models/diff';
|
||||
import { getWorkingDirectoryDiff } from './diff';
|
||||
import { formatPatch, formatPatchToDiscardChanges } from './patch-formatter';
|
||||
|
||||
export async function applyPatchToIndex(repositoryPath: string, file: WorkingDirectoryFileChange): Promise<void> {
|
||||
// If the file was a rename we have to recreate that rename since we've
|
||||
// just blown away the index. Think of this block of weird looking commands
|
||||
// as running `git mv`.
|
||||
if (file.status.kind === FileStatusKind.Renamed) {
|
||||
// Make sure the index knows of the removed file. We could use
|
||||
// update-index --force-remove here but we're not since it's
|
||||
// possible that someone staged a rename and then recreated the
|
||||
// original file and we don't have any guarantees for in which order
|
||||
// partial stages vs full-file stages happen. By using git add the
|
||||
// worst that could happen is that we re-stage a file already staged
|
||||
// by updateIndex.
|
||||
await git(['add', '--u', '--', file.status.oldPath], repositoryPath, 'applyPatchToIndex');
|
||||
|
||||
// Figure out the blob oid of the removed file
|
||||
// <mode> SP <type> SP <object> TAB <file>
|
||||
const oldFile = await git(['ls-tree', 'HEAD', '--', file.status.oldPath], repositoryPath, 'applyPatchToIndex');
|
||||
|
||||
const [info] = oldFile.output.toString().split('\t', 1);
|
||||
const [mode, , oid] = info.split(' ', 3);
|
||||
|
||||
// Add the old file blob to the index under the new name
|
||||
await git(['update-index', '--add', '--cacheinfo', mode, oid, file.path], repositoryPath, 'applyPatchToIndex');
|
||||
}
|
||||
|
||||
const applyArgs: string[] = ['apply', '--cached', '--unidiff-zero', '--whitespace=nowarn', '-'];
|
||||
|
||||
const diff = await getWorkingDirectoryDiff(repositoryPath, file);
|
||||
|
||||
if (diff.kind !== DiffType.Text && diff.kind !== DiffType.LargeText) {
|
||||
const { kind } = diff;
|
||||
switch (diff.kind) {
|
||||
case DiffType.Binary:
|
||||
case DiffType.Image:
|
||||
throw new Error(`Can't create partial commit in binary file: ${file.path}`);
|
||||
case DiffType.Unrenderable:
|
||||
throw new Error(`File diff is too large to generate a partial commit: ${file.path}`);
|
||||
default:
|
||||
throw new Error(`Unknown diff kind: ${kind}`);
|
||||
}
|
||||
}
|
||||
|
||||
const patch = await formatPatch(file, diff);
|
||||
await git(applyArgs, repositoryPath, 'applyPatchToIndex', { stdin: patch });
|
||||
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a patch to see if it will apply cleanly.
|
||||
*
|
||||
* @param workTree work tree (which should be checked out to a specific commit)
|
||||
* @param patch a Git patch (or patch series) to try applying
|
||||
* @returns whether the patch applies cleanly
|
||||
*
|
||||
* See `formatPatch` to generate a patch series from existing Git commits
|
||||
*/
|
||||
export async function checkPatch(workTreeDir: string, patch: string): Promise<boolean> {
|
||||
const result = await git(['apply', '--check', '-'], workTreeDir, 'checkPatch', {
|
||||
stdin: patch,
|
||||
stdinEncoding: 'utf8',
|
||||
expectedErrors: new Set<DugiteError>([DugiteError.PatchDoesNotApply])
|
||||
});
|
||||
|
||||
if (result.gitError === DugiteError.PatchDoesNotApply) {
|
||||
// other errors will be thrown if encountered, so this is fine for now
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discards the local changes for the specified file based on the passed diff
|
||||
* and a selection of lines from it.
|
||||
*
|
||||
* When passed an empty selection, this method won't do anything. When passed a
|
||||
* full selection, all changes from the file will be discarded.
|
||||
*
|
||||
* @param repository The repository in which to update the working directory
|
||||
* with information from the index
|
||||
*
|
||||
* @param filePath The relative path in the working directory of the file to use
|
||||
*
|
||||
* @param diff The diff containing the file local changes
|
||||
*
|
||||
* @param selection The selection of changes from the diff to discard
|
||||
*/
|
||||
export async function discardChangesFromSelection(
|
||||
repositoryPath: string,
|
||||
filePath: string,
|
||||
diff: ITextDiff,
|
||||
selection: DiffSelection
|
||||
) {
|
||||
const patch = formatPatchToDiscardChanges(filePath, diff, selection);
|
||||
|
||||
if (patch === null) {
|
||||
// When the patch is null we don't need to apply it since it will be a noop.
|
||||
return;
|
||||
}
|
||||
|
||||
const args = ['apply', '--unidiff-zero', '--whitespace=nowarn', '-'];
|
||||
|
||||
await git(args, repositoryPath, 'discardChangesFromSelection', {
|
||||
stdin: patch
|
||||
});
|
||||
}
|
||||
21
packages/noodl-git/src/core/attributes.ts
Normal file
21
packages/noodl-git/src/core/attributes.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
export async function appendGitAttributes(repositoryDir: string, newItems: string[]) {
|
||||
const gitAttributesPath = path.join(repositoryDir, '.gitattributes');
|
||||
const content = fs.existsSync(gitAttributesPath)
|
||||
? await fs.promises.readFile(gitAttributesPath, { encoding: 'utf-8' })
|
||||
: '';
|
||||
const lineByLine = content.split('\n').map((x) => x.trim());
|
||||
|
||||
const newContent = [];
|
||||
newItems.forEach((text) => {
|
||||
if (!lineByLine.includes(text)) {
|
||||
newContent.push(text);
|
||||
}
|
||||
});
|
||||
|
||||
if (newContent.length > 0) {
|
||||
await fs.promises.appendFile(gitAttributesPath, newContent.join('\r\n') + '\r\n');
|
||||
}
|
||||
}
|
||||
9
packages/noodl-git/src/core/authentication.ts
Normal file
9
packages/noodl-git/src/core/authentication.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { GitError as DugiteError } from 'dugite';
|
||||
|
||||
/** The set of errors which fit under the "authentication failed" umbrella. */
|
||||
export const AuthenticationErrors: ReadonlySet<DugiteError> = new Set([
|
||||
DugiteError.HTTPSAuthenticationFailed,
|
||||
DugiteError.SSHAuthenticationFailed,
|
||||
DugiteError.HTTPSRepositoryNotFound,
|
||||
DugiteError.SSHRepositoryNotFound
|
||||
]);
|
||||
51
packages/noodl-git/src/core/branch.ts
Normal file
51
packages/noodl-git/src/core/branch.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { git } from './client';
|
||||
import { Branch } from './models/branch';
|
||||
|
||||
/**
|
||||
* Returns the current branch name.
|
||||
*
|
||||
* For example: 'main'
|
||||
*/
|
||||
export async function currentBranchName(repositoryDir: string): Promise<string> {
|
||||
// Used 'rev-parse' before, but
|
||||
const args = ['rev-parse', '--abbrev-ref', 'HEAD'];
|
||||
|
||||
try {
|
||||
const { output } = await git(args, repositoryDir, 'currentBranchName');
|
||||
return output.toString().trim();
|
||||
} catch {
|
||||
// This will happen when there is no commit
|
||||
return 'null';
|
||||
}
|
||||
}
|
||||
|
||||
export async function createBranch(
|
||||
basePath: string,
|
||||
name: string,
|
||||
startPoint: string | null,
|
||||
noTrack?: boolean
|
||||
): Promise<void> {
|
||||
const args = startPoint !== null ? ['branch', name, startPoint] : ['branch', name];
|
||||
|
||||
// if we're branching directly from a remote branch, we don't want to track it
|
||||
// tracking it will make the rest of desktop think we want to push to that
|
||||
// remote branch's upstream (which would likely be the upstream of the fork)
|
||||
if (noTrack) {
|
||||
args.push('--no-track');
|
||||
}
|
||||
|
||||
await git(args, basePath, 'createBranch');
|
||||
}
|
||||
|
||||
/** Rename the given branch to a new name. */
|
||||
export async function renameBranch(repositoryDir: string, branch: Branch, newName: string): Promise<void> {
|
||||
await git(['branch', '-m', branch.nameWithoutRemote, newName], repositoryDir, 'renameBranch');
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the branch locally.
|
||||
*/
|
||||
export async function deleteLocalBranch(repositoryDir: string, branchName: string): Promise<true> {
|
||||
await git(['branch', '-D', branchName], repositoryDir, 'deleteLocalBranch');
|
||||
return true;
|
||||
}
|
||||
19
packages/noodl-git/src/core/cat-file.ts
Normal file
19
packages/noodl-git/src/core/cat-file.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { git, GitExecutionOptions } from './client';
|
||||
|
||||
export async function getFileContents(repositoryDir: string, blobSha: string): Promise<string> {
|
||||
const args = ['cat-file', '-p', blobSha];
|
||||
const opts: GitExecutionOptions = {
|
||||
successExitCodes: new Set([0, 1]),
|
||||
spawn: false,
|
||||
processCallback: (cb) => {
|
||||
// If Node.js encounters a synchronous runtime error while spawning
|
||||
// `stdout` will be undefined and the error will be emitted asynchronously
|
||||
if (cb.stdout) {
|
||||
cb.stdout.setEncoding('binary');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const { output } = await git(args, repositoryDir, 'getFileContents', opts);
|
||||
return output;
|
||||
}
|
||||
88
packages/noodl-git/src/core/checkout.ts
Normal file
88
packages/noodl-git/src/core/checkout.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import { git, GitExecutionOptions, gitNetworkArguments } from './client';
|
||||
import { Branch, BranchType } from './models/branch';
|
||||
import { ICheckoutProgress } from './models/progress';
|
||||
import { CheckoutProgressParser, executionOptionsWithProgress } from './progress';
|
||||
import { AuthenticationErrors } from './authentication';
|
||||
|
||||
export type ProgressCallback = (progress: ICheckoutProgress) => void;
|
||||
|
||||
export type CheckoutFlags = {
|
||||
force?: boolean;
|
||||
commitish?: string;
|
||||
};
|
||||
|
||||
async function getCheckoutArgs(
|
||||
repositoryDir: string,
|
||||
branch: Branch,
|
||||
flags?: CheckoutFlags,
|
||||
progressCallback?: ProgressCallback
|
||||
) {
|
||||
const networkArguments = await gitNetworkArguments(repositoryDir);
|
||||
|
||||
let baseArgs =
|
||||
progressCallback != null ? [...networkArguments, 'checkout', '--progress'] : [...networkArguments, 'checkout'];
|
||||
|
||||
// Add the flags
|
||||
if (flags?.force) baseArgs.push('--force');
|
||||
|
||||
if (branch.type === BranchType.Remote) {
|
||||
if (flags?.commitish) {
|
||||
return baseArgs.concat(branch.name, '-b', branch.nameWithoutRemote, flags.commitish, '--');
|
||||
}
|
||||
return baseArgs.concat(branch.name, '-b', branch.nameWithoutRemote, '--');
|
||||
}
|
||||
|
||||
return baseArgs.concat(branch.name, '--');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check out the given branch.
|
||||
*
|
||||
* @param repository - The repository in which the branch checkout should
|
||||
* take place
|
||||
*
|
||||
* @param branch - The branch name that should be checked out
|
||||
*
|
||||
* @param progressCallback - An optional function which will be invoked
|
||||
* with information about the current progress
|
||||
* of the checkout operation. When provided this
|
||||
* enables the '--progress' command line flag for
|
||||
* 'git checkout'.
|
||||
*/
|
||||
export async function checkoutBranch(
|
||||
repositoryDir: string,
|
||||
branch: Branch,
|
||||
flags?: CheckoutFlags,
|
||||
progressCallback?: ProgressCallback
|
||||
): Promise<true> {
|
||||
let opts: GitExecutionOptions = {
|
||||
env: {},
|
||||
expectedErrors: AuthenticationErrors
|
||||
};
|
||||
|
||||
if (progressCallback) {
|
||||
const title = `Checking out branch ${branch.name}`;
|
||||
const kind = 'checkout';
|
||||
const targetBranch = branch.name;
|
||||
|
||||
opts = await executionOptionsWithProgress(opts, new CheckoutProgressParser(), (progress) => {
|
||||
if (progress.kind === 'progress') {
|
||||
const description = progress.details.text;
|
||||
const value = progress.percent;
|
||||
|
||||
progressCallback({ kind, title, description, value, targetBranch });
|
||||
}
|
||||
});
|
||||
|
||||
// Initial progress
|
||||
progressCallback({ kind, title, value: 0, targetBranch });
|
||||
}
|
||||
|
||||
const args = await getCheckoutArgs(repositoryDir, branch, flags, progressCallback);
|
||||
|
||||
await git(args, repositoryDir, 'checkoutBranch', opts);
|
||||
|
||||
// we return `true` here so `GitStore.performFailableGitOperation`
|
||||
// will return _something_ differentiable from `undefined` if this succeeds
|
||||
return true;
|
||||
}
|
||||
11
packages/noodl-git/src/core/clean.ts
Normal file
11
packages/noodl-git/src/core/clean.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { git } from './client';
|
||||
|
||||
/**
|
||||
* Delete all the untracked files.
|
||||
*
|
||||
* @param repositoryDir
|
||||
* @returns
|
||||
*/
|
||||
export function cleanUntrackedFiles(repositoryDir: string) {
|
||||
return git(['clean', '-f', '-d'], repositoryDir, 'cleanUntrackedFiles');
|
||||
}
|
||||
175
packages/noodl-git/src/core/client.ts
Normal file
175
packages/noodl-git/src/core/client.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
import { ChildProcess } from 'child_process';
|
||||
import { GitError as DugiteError } from 'dugite';
|
||||
import { IGitSpawnExecutionOptions } from 'dugite/build/lib/git-process';
|
||||
|
||||
import { getGitPath } from '../paths';
|
||||
import * as ClientExec from './client/client-exec';
|
||||
import * as ClientSpawn from './client/client-spawn';
|
||||
import { IGitRunOptions } from './client/common';
|
||||
import { IGitResult } from './git-error';
|
||||
import { withTrampolineEnv } from './trampoline/trampoline-environment';
|
||||
|
||||
/**
|
||||
* Return an array of command line arguments for network operation that override
|
||||
* the default git configuration values provided by local, global, or system
|
||||
* level git configs.
|
||||
*
|
||||
* These arguments should be inserted before the subcommand, i.e in
|
||||
* the case of `git pull` these arguments needs to go before the `pull`
|
||||
* argument.
|
||||
*
|
||||
* This should be used on the commands where we want to use
|
||||
* the trampoline server.
|
||||
*
|
||||
* @param repository the local repository associated with the command, to check
|
||||
* local, global and system config for an existing value.
|
||||
* If `null` if provided (for example, when cloning a new
|
||||
* repository), this function will check global and system
|
||||
* config for an existing `protocol.version` setting
|
||||
*/
|
||||
export async function gitNetworkArguments(_repositoryDir: string | null): Promise<ReadonlyArray<string>> {
|
||||
const baseArgs = [
|
||||
// Explicitly unset any defined credential helper, we rely on our
|
||||
// own askpass for authentication.
|
||||
'-c',
|
||||
'credential.helper='
|
||||
];
|
||||
|
||||
return baseArgs;
|
||||
}
|
||||
|
||||
export interface GitExecutionOptions extends IGitSpawnExecutionOptions {
|
||||
/**
|
||||
* The exit codes which indicate success to the
|
||||
* caller. Unexpected exit codes will be logged and an
|
||||
* error thrown. Defaults to 0 if undefined.
|
||||
*/
|
||||
readonly successExitCodes?: ReadonlySet<number>;
|
||||
|
||||
/**
|
||||
* The git errors which are expected by the caller. Unexpected errors will
|
||||
* be logged and an error thrown.
|
||||
*/
|
||||
readonly expectedErrors?: ReadonlySet<DugiteError>;
|
||||
|
||||
/**
|
||||
* An optional string or buffer which will be written to
|
||||
* the child process stdin stream immediately immediately
|
||||
* after spawning the process.
|
||||
*/
|
||||
readonly stdin?: string | Buffer;
|
||||
|
||||
/**
|
||||
* The encoding to use when writing to stdin, if the stdin
|
||||
* parameter is a string.
|
||||
*/
|
||||
readonly stdinEncoding?: BufferEncoding;
|
||||
|
||||
/**
|
||||
* While this is true; it will spawn the process instead which
|
||||
* results in that we can handle more of the output from the
|
||||
* process.
|
||||
* Otherwise the data will be cut off.
|
||||
*
|
||||
* Default: true
|
||||
*/
|
||||
readonly spawn?: boolean;
|
||||
|
||||
/**
|
||||
* The size the output buffer to allocate to the spawned process. Set this
|
||||
* if you are anticipating a large amount of output.
|
||||
*
|
||||
* If not specified, this will be 10MB (10485760 bytes) which should be
|
||||
* enough for most Git operations.
|
||||
*/
|
||||
readonly maxBuffer?: number;
|
||||
|
||||
/**
|
||||
* Should the process output be checked for errors?
|
||||
* Can be very slow when there's lost of output
|
||||
*
|
||||
* If not specified, this defaults to true
|
||||
*/
|
||||
readonly parseErrors?: boolean;
|
||||
|
||||
/**
|
||||
* An optional callback which will be invoked with the child
|
||||
* process instance after spawning the git process.
|
||||
*
|
||||
* Note that if the stdin parameter was specified the stdin
|
||||
* stream will be closed by the time this callback fires.
|
||||
*/
|
||||
readonly processCallback?: (process: ChildProcess) => void;
|
||||
|
||||
readonly processDataCallback?: (chunk: Buffer) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Shell out to git with the given arguments, at the given path.
|
||||
*
|
||||
* @param args The arguments to pass to `git`.
|
||||
*
|
||||
* @param path The working directory path for the execution of the
|
||||
* command.
|
||||
*
|
||||
* @param name The name for the command based on its caller's
|
||||
* context. This will be used for performance
|
||||
* measurements and debugging.
|
||||
*
|
||||
* @param options Configuration options for the execution of git,
|
||||
* see GitExecutionOptions for more information.
|
||||
*
|
||||
* Returns the result. If the command exits with a code not in
|
||||
* `successExitCodes` or an error not in `expectedErrors`, a `GitError` will be
|
||||
* thrown.
|
||||
*/
|
||||
export async function git(
|
||||
args: string[],
|
||||
path: string,
|
||||
name: string,
|
||||
options?: GitExecutionOptions
|
||||
): Promise<IGitResult> {
|
||||
return withTrampolineEnv(async (env) => {
|
||||
const enableLogging = !!localStorage.getItem('git-output');
|
||||
const enableTraceLogging = !!localStorage.getItem('git-trace');
|
||||
|
||||
if (enableLogging) {
|
||||
console.groupCollapsed(`[${name}] git ${args.join(' ')}`);
|
||||
console.log('path', path);
|
||||
console.log('args', args);
|
||||
console.log('options', options);
|
||||
console.groupEnd();
|
||||
}
|
||||
|
||||
if (!process.env.LOCAL_GIT_DIRECTORY) {
|
||||
process.env.LOCAL_GIT_DIRECTORY = getGitPath();
|
||||
}
|
||||
|
||||
const runOptions: IGitRunOptions = {
|
||||
name,
|
||||
path,
|
||||
args,
|
||||
enableLogging,
|
||||
options: {
|
||||
...(options || {}),
|
||||
env: {
|
||||
// Add the input envs
|
||||
...(options?.env || {}),
|
||||
|
||||
// Add the trampoline env
|
||||
...env,
|
||||
|
||||
// Add extra envs
|
||||
GIT_TERMINAL_PROMPT: '0',
|
||||
GIT_TRACE: enableTraceLogging ? '1' : '0'
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (options?.spawn) {
|
||||
return await ClientSpawn.execute(runOptions);
|
||||
}
|
||||
|
||||
return await ClientExec.execute(runOptions);
|
||||
});
|
||||
}
|
||||
66
packages/noodl-git/src/core/client/client-exec.ts
Normal file
66
packages/noodl-git/src/core/client/client-exec.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { GitProcess } from "dugite";
|
||||
import { getDescriptionForError, IGitResult, GitError } from "../git-error";
|
||||
import { IGitRunOptions, rejectLog } from "./common";
|
||||
|
||||
export function execute({
|
||||
name,
|
||||
path,
|
||||
args,
|
||||
enableLogging,
|
||||
options,
|
||||
}: IGitRunOptions): Promise<IGitResult> {
|
||||
return new Promise<IGitResult>(async (resolve, reject) => {
|
||||
const gitResult = await GitProcess.exec(args, path, {
|
||||
env: options.env,
|
||||
stdin: options?.stdin,
|
||||
stdinEncoding: options?.stdinEncoding,
|
||||
processCallback: options?.processCallback,
|
||||
});
|
||||
|
||||
const exitCodes = options?.successExitCodes || new Set([0]);
|
||||
const acceptableExitCode =
|
||||
gitResult.exitCode !== null && exitCodes.has(gitResult.exitCode);
|
||||
|
||||
const shouldParseErrors = !(options?.parseErrors === false);
|
||||
|
||||
// Parse the error if acceptable exit code
|
||||
const gitError = shouldParseErrors
|
||||
? acceptableExitCode
|
||||
? GitProcess.parseError(gitResult.stderr) ??
|
||||
GitProcess.parseError(gitResult.stdout)
|
||||
: null
|
||||
: null;
|
||||
|
||||
const gitErrorDescription = gitError
|
||||
? getDescriptionForError(gitError)
|
||||
: null;
|
||||
|
||||
// Return the result
|
||||
const result: IGitResult = {
|
||||
output: gitResult.stdout,
|
||||
error: gitResult.stderr,
|
||||
exitCode: gitResult.exitCode,
|
||||
gitError,
|
||||
gitErrorDescription,
|
||||
path,
|
||||
};
|
||||
|
||||
if (enableLogging) {
|
||||
console.log(`[${name}] stdout: ${result.output.toString()}`);
|
||||
console.log(`[${name}] stderr: ${result.error.toString()}`);
|
||||
}
|
||||
|
||||
// Check if the error is acceptable
|
||||
const acceptableError =
|
||||
gitError && options?.expectedErrors
|
||||
? options.expectedErrors.has(gitError)
|
||||
: false;
|
||||
|
||||
if ((gitError && acceptableError) || acceptableExitCode) {
|
||||
return resolve(result);
|
||||
}
|
||||
|
||||
rejectLog(args, result);
|
||||
reject(new GitError(result, args));
|
||||
});
|
||||
}
|
||||
164
packages/noodl-git/src/core/client/client-spawn.ts
Normal file
164
packages/noodl-git/src/core/client/client-spawn.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import { GitProcess } from "dugite";
|
||||
import { getDescriptionForError, IGitResult, GitError } from "../git-error";
|
||||
import { isErrnoException } from "../errno-exception";
|
||||
import { IGitSpawnExecutionOptions } from "dugite/build/lib/git-process";
|
||||
import { IGitRunOptions, rejectLog } from "./common";
|
||||
|
||||
export function execute({
|
||||
name,
|
||||
path,
|
||||
args,
|
||||
enableLogging,
|
||||
options,
|
||||
}: IGitRunOptions): Promise<IGitResult> {
|
||||
const spawnOptions: IGitSpawnExecutionOptions = {
|
||||
...(options ?? {}),
|
||||
|
||||
// Explicitly set TERM to 'dumb' so that if Desktop was launched
|
||||
// from a terminal or if the system environment variables
|
||||
// have TERM set Git won't consider us as a smart terminal.
|
||||
// See https://github.com/git/git/blob/a7312d1a2/editor.c#L11-L15
|
||||
env: {
|
||||
TERM: "dumb",
|
||||
...(options ?? {}).env,
|
||||
} as Object,
|
||||
};
|
||||
|
||||
return new Promise<IGitResult>(async (resolve, reject) => {
|
||||
const process = GitProcess.spawn(args, path, spawnOptions);
|
||||
|
||||
// Send the stdin
|
||||
if (options?.stdin) {
|
||||
const stdinEncoding = options.stdinEncoding ?? "utf-8";
|
||||
const stdinEncoded = Buffer.from(options.stdin).toString(stdinEncoding);
|
||||
process.stdin.write(stdinEncoded);
|
||||
process.stdin.end();
|
||||
}
|
||||
|
||||
//
|
||||
if (options?.processCallback) {
|
||||
options.processCallback(process);
|
||||
}
|
||||
|
||||
process.on("error", (err) => {
|
||||
// If this is an exception thrown by Node.js while attempting to
|
||||
// spawn let's keep the salient details but include the name of
|
||||
// the operation.
|
||||
if (isErrnoException(err)) {
|
||||
reject(new Error(`Failed to execute ${name}: ${err.code}`));
|
||||
} else {
|
||||
// for unhandled errors raised by the process, let's surface this in the
|
||||
// promise and make the caller handle it
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
let totalStdoutLength = 0;
|
||||
let killSignalSent = false;
|
||||
|
||||
const stdoutChunks = new Array<Buffer>();
|
||||
|
||||
// If Node.js encounters a synchronous runtime error while spawning
|
||||
// `stdout` will be undefined and the error will be emitted asynchronously
|
||||
if (process.stdout) {
|
||||
process.stdout.on("data", (chunk: Buffer) => {
|
||||
if (enableLogging) {
|
||||
console.log(`[${name}] stdout: ${chunk.toString()}`);
|
||||
}
|
||||
|
||||
if (options?.processDataCallback) {
|
||||
options?.processDataCallback(chunk);
|
||||
}
|
||||
|
||||
if (
|
||||
options?.maxBuffer === undefined ||
|
||||
(options?.maxBuffer &&
|
||||
(!options.maxBuffer || totalStdoutLength < options.maxBuffer))
|
||||
) {
|
||||
stdoutChunks.push(chunk);
|
||||
totalStdoutLength += chunk.length;
|
||||
}
|
||||
|
||||
if (
|
||||
options?.maxBuffer === undefined ||
|
||||
(options?.maxBuffer &&
|
||||
totalStdoutLength >= options.maxBuffer &&
|
||||
!killSignalSent)
|
||||
) {
|
||||
process.kill();
|
||||
killSignalSent = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const stderrChunks = new Array<Buffer>();
|
||||
|
||||
// See comment above about stdout and asynchronous errors.
|
||||
if (process.stderr) {
|
||||
process.stderr.on("data", (chunk: Buffer) => {
|
||||
if (enableLogging) {
|
||||
console.log(`[${name}] stderr: ${chunk.toString()}`);
|
||||
}
|
||||
|
||||
if (options?.processDataCallback) {
|
||||
options?.processDataCallback(chunk);
|
||||
}
|
||||
|
||||
stderrChunks.push(chunk);
|
||||
});
|
||||
}
|
||||
|
||||
process.on("close", (code, signal) => {
|
||||
const stdout = Buffer.concat(
|
||||
stdoutChunks,
|
||||
options?.maxBuffer
|
||||
? Math.min(options?.maxBuffer, totalStdoutLength)
|
||||
: totalStdoutLength
|
||||
);
|
||||
|
||||
const stderr = Buffer.concat(stderrChunks);
|
||||
|
||||
// mimic the experience of GitProcess.exec for handling known codes when
|
||||
// the process terminates
|
||||
const exitCodes = options?.successExitCodes || new Set([0]);
|
||||
const acceptableExitCode = code !== null && exitCodes.has(code);
|
||||
|
||||
const shouldParseErrors = !(options?.parseErrors === false);
|
||||
|
||||
// Parse the error if acceptable exit code
|
||||
const gitError = shouldParseErrors
|
||||
? acceptableExitCode
|
||||
? GitProcess.parseError(stderr.toString()) ??
|
||||
GitProcess.parseError(stdout.toString())
|
||||
: null
|
||||
: null;
|
||||
|
||||
const gitErrorDescription = gitError
|
||||
? getDescriptionForError(gitError)
|
||||
: null;
|
||||
|
||||
// Check if the error is acceptable
|
||||
const acceptableError =
|
||||
gitError && options?.expectedErrors
|
||||
? options.expectedErrors.has(gitError)
|
||||
: false;
|
||||
|
||||
// Return the result
|
||||
const result: IGitResult = {
|
||||
output: stdout.toString("utf8"),
|
||||
error: stderr.toString("utf8"),
|
||||
exitCode: code,
|
||||
gitError,
|
||||
gitErrorDescription,
|
||||
path,
|
||||
};
|
||||
|
||||
if ((gitError && acceptableError) || acceptableExitCode || signal) {
|
||||
return resolve(result);
|
||||
}
|
||||
|
||||
rejectLog(args, result);
|
||||
reject(new GitError(result, args));
|
||||
});
|
||||
});
|
||||
}
|
||||
60
packages/noodl-git/src/core/client/common.ts
Normal file
60
packages/noodl-git/src/core/client/common.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { GitError as DugiteError } from 'dugite';
|
||||
import { GitExecutionOptions } from '../client';
|
||||
import { IGitResult } from '../git-error';
|
||||
import { getFileFromExceedsError } from '../helpers/regex';
|
||||
|
||||
export interface IGitRunOptions {
|
||||
/**
|
||||
* The name of the Git operation.
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* The path to the repository.
|
||||
*/
|
||||
path: string;
|
||||
|
||||
/**
|
||||
* Git arguments.
|
||||
*/
|
||||
args: string[];
|
||||
|
||||
enableLogging: boolean;
|
||||
|
||||
options?: GitExecutionOptions;
|
||||
}
|
||||
|
||||
export function rejectLog(
|
||||
args: string[],
|
||||
{ output, error, exitCode, gitError, gitErrorDescription }: IGitResult
|
||||
): void {
|
||||
// The caller should either handle this error, or expect that exit code.
|
||||
const errorMessage = new Array<string>();
|
||||
errorMessage.push(`\`git ${args.join(' ')}\` exited with an unexpected code: ${exitCode}.`);
|
||||
|
||||
if (output) {
|
||||
errorMessage.push('stdout:');
|
||||
errorMessage.push(output.toString());
|
||||
}
|
||||
|
||||
if (error) {
|
||||
errorMessage.push('stderr:');
|
||||
errorMessage.push(error.toString());
|
||||
}
|
||||
|
||||
if (gitError) {
|
||||
errorMessage.push(`(The error was parsed as ${gitError}: ${gitErrorDescription})`);
|
||||
}
|
||||
|
||||
if (gitError === DugiteError.PushWithFileSizeExceedingLimit) {
|
||||
const result = getFileFromExceedsError(errorMessage.join());
|
||||
const files = result.join('\n');
|
||||
|
||||
if (files !== '') {
|
||||
errorMessage.push('\n\nFile causing error:\n\n' + files);
|
||||
}
|
||||
}
|
||||
|
||||
console.error(errorMessage.join('\n'));
|
||||
console.error(`Git returned an unexpected exit code '${exitCode}' which should be handled by the caller.'`);
|
||||
}
|
||||
74
packages/noodl-git/src/core/clone.ts
Normal file
74
packages/noodl-git/src/core/clone.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { git, GitExecutionOptions, gitNetworkArguments } from './client';
|
||||
import { ICloneProgress } from './models/progress';
|
||||
import { CloneOptions } from './models/clone-options';
|
||||
import { CloneProgressParser, executionOptionsWithProgress } from './progress';
|
||||
import { DEFAULT_BRANCH } from '../constants';
|
||||
import { IGitResult } from './git-error';
|
||||
import { app } from '@electron/remote';
|
||||
import { join } from 'path';
|
||||
|
||||
/**
|
||||
* Clones a repository from a given url into to the specified path.
|
||||
*
|
||||
* @param url - The remote repository URL to clone from
|
||||
*
|
||||
* @param path - The destination path for the cloned repository. If the
|
||||
* path does not exist it will be created. Cloning into an
|
||||
* existing directory is only allowed if the directory is
|
||||
* empty.
|
||||
*
|
||||
* @param options - Options specific to the clone operation, see the
|
||||
* documentation for CloneOptions for more details.
|
||||
*
|
||||
* @param progressCallback - An optional function which will be invoked
|
||||
* with information about the current progress
|
||||
* of the clone operation. When provided this enables
|
||||
* the '--progress' command line flag for
|
||||
* 'git clone'.
|
||||
*
|
||||
*/
|
||||
export async function clone(
|
||||
url: string,
|
||||
path: string,
|
||||
options: CloneOptions,
|
||||
progressCallback?: (progress: ICloneProgress) => void
|
||||
): Promise<IGitResult> {
|
||||
const networkArguments = await gitNetworkArguments(null);
|
||||
const defaultBranch = options.defaultBranch ?? DEFAULT_BRANCH;
|
||||
|
||||
const args = [...networkArguments, '-c', `init.defaultBranch=${defaultBranch}`, 'clone', '--recursive'];
|
||||
|
||||
let opts: GitExecutionOptions = {};
|
||||
|
||||
if (progressCallback) {
|
||||
args.push('--progress');
|
||||
|
||||
const title = `Cloning into ${path}`;
|
||||
const kind = 'clone';
|
||||
|
||||
opts = await executionOptionsWithProgress({ ...opts }, new CloneProgressParser(), (progress) => {
|
||||
const description = progress.kind === 'progress' ? progress.details.text : progress.text;
|
||||
const value = progress.percent;
|
||||
|
||||
progressCallback({ kind, title, description, value });
|
||||
});
|
||||
|
||||
// Initial progress
|
||||
progressCallback({ kind, title, value: 0 });
|
||||
}
|
||||
|
||||
if (options.branch) {
|
||||
args.push('-b', options.branch);
|
||||
} else if (defaultBranch) {
|
||||
args.push('-b', defaultBranch);
|
||||
}
|
||||
|
||||
if (options.singleBranch) {
|
||||
args.push('--single-branch');
|
||||
}
|
||||
|
||||
args.push('--', url, path);
|
||||
|
||||
const cwdPath = join(app.getAppPath(), '..');
|
||||
return await git(args, cwdPath, 'clone', opts);
|
||||
}
|
||||
67
packages/noodl-git/src/core/commit.ts
Normal file
67
packages/noodl-git/src/core/commit.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { git } from "./client";
|
||||
import { parseCommitSHA } from "./git-error";
|
||||
|
||||
/** Grouping of information required to create a commit */
|
||||
interface ICommitContext {
|
||||
/**
|
||||
* The summary of the commit message (required)
|
||||
*/
|
||||
readonly summary: string;
|
||||
/**
|
||||
* Additional details for the commit message (optional)
|
||||
*/
|
||||
readonly description: string | null;
|
||||
/**
|
||||
* Whether or not it should amend the last commit (optional, default: false)
|
||||
*/
|
||||
readonly amend?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param repositoryDir repository directory
|
||||
* @param message commit message
|
||||
* @param amend amend the commit
|
||||
* @returns the commit long SHA
|
||||
*/
|
||||
export async function createCommit(
|
||||
repositoryDir: string,
|
||||
message: string,
|
||||
amend: boolean = false
|
||||
): Promise<string> {
|
||||
const args = ["-F", "-", "--allow-empty-message"];
|
||||
|
||||
if (amend) {
|
||||
args.push("--amend");
|
||||
}
|
||||
|
||||
// Create the commit
|
||||
const result = await git(["commit", ...args], repositoryDir, "createCommit", {
|
||||
stdin: message,
|
||||
spawn: false,
|
||||
});
|
||||
|
||||
// Parse the result where we want to get the short SHA
|
||||
const shortSha = parseCommitSHA(result);
|
||||
|
||||
// Try to get the long sha
|
||||
// Problem here is that we might get something like "(root-commit)"
|
||||
// in shortSha where we actually expect a short sha.
|
||||
//
|
||||
// This will occur on the first commit in the repository.
|
||||
try {
|
||||
// Retrieve the long sha since it's more reliable.
|
||||
const longShaResult = await git(
|
||||
["rev-parse", shortSha],
|
||||
repositoryDir,
|
||||
"createCommit"
|
||||
);
|
||||
|
||||
const longSha = longShaResult.output.toString().trim();
|
||||
return longSha;
|
||||
} catch (_e) {}
|
||||
|
||||
// If the previous call failed then we probably don't care about getting the
|
||||
// long sha anyways. To feel better we should probably check this properly and
|
||||
// actually return the correct value always.
|
||||
return null;
|
||||
}
|
||||
227
packages/noodl-git/src/core/config.ts
Normal file
227
packages/noodl-git/src/core/config.ts
Normal file
@@ -0,0 +1,227 @@
|
||||
import { git } from './client';
|
||||
import { normalize } from 'path';
|
||||
import { app } from '@electron/remote';
|
||||
import { join } from 'path';
|
||||
|
||||
function appDir() {
|
||||
return join(app.getAppPath(), '..');
|
||||
}
|
||||
|
||||
/**
|
||||
* Look up a config value by name in the repository.
|
||||
*
|
||||
* @param onlyLocal Whether or not the value to be retrieved should stick to
|
||||
* the local repository settings. It is false by default. This
|
||||
* is equivalent to using the `--local` argument in the
|
||||
* `git config` invocation.
|
||||
*/
|
||||
export function getConfigValue(baseDir: string, name: string, onlyLocal: boolean = false): Promise<string | null> {
|
||||
return getConfigValueInPath(name, baseDir, onlyLocal);
|
||||
}
|
||||
|
||||
/** Look up a global config value by name. */
|
||||
export function getGlobalConfigValue(
|
||||
name: string,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<string | null> {
|
||||
return getConfigValueInPath(name, null, false, undefined, env);
|
||||
}
|
||||
|
||||
/**
|
||||
* Look up a global config value by name.
|
||||
*
|
||||
* Treats the returned value as a boolean as per Git's
|
||||
* own definition of a boolean configuration value (i.e.
|
||||
* 0 -> false, "off" -> false, "yes" -> true etc)
|
||||
*/
|
||||
export async function getGlobalBooleanConfigValue(
|
||||
name: string,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<boolean | null> {
|
||||
const value = await getConfigValueInPath(name, null, false, 'bool', env);
|
||||
return value === null ? null : value !== 'false';
|
||||
}
|
||||
|
||||
/**
|
||||
* Look up a config value by name
|
||||
*
|
||||
* @param path The path to execute the `git` command in. If null
|
||||
* we'll use the global configuration (i.e. --global)
|
||||
* and execute the Git call from the same location that
|
||||
* GitHub Desktop is installed in.
|
||||
* @param onlyLocal Whether or not the value to be retrieved should stick to
|
||||
* the local repository settings (if a path is specified). It
|
||||
* is false by default. It is equivalent to using the `--local`
|
||||
* argument in the `git config` invocation.
|
||||
* @param type Canonicalize configuration values according to the
|
||||
* expected type (i.e. 0 -> false, "on" -> true etc).
|
||||
* See `--type` documentation in `git config`
|
||||
*/
|
||||
async function getConfigValueInPath(
|
||||
name: string,
|
||||
path: string | null,
|
||||
onlyLocal: boolean = false,
|
||||
type?: 'bool' | 'int' | 'bool-or-int' | 'path' | 'expiry-date' | 'color',
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<string | null> {
|
||||
const flags = ['config', '-z'];
|
||||
if (!path) {
|
||||
flags.push('--global');
|
||||
} else if (onlyLocal) {
|
||||
flags.push('--local');
|
||||
}
|
||||
|
||||
if (type !== undefined) {
|
||||
flags.push('--type', type);
|
||||
}
|
||||
|
||||
flags.push(name);
|
||||
|
||||
const result = await git(flags, path || appDir(), 'getConfigValueInPath', {
|
||||
successExitCodes: new Set([0, 1]),
|
||||
env
|
||||
});
|
||||
|
||||
// Git exits with 1 if the value isn't found. That's OK.
|
||||
if (result.exitCode === 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const output = result.output.toString();
|
||||
const pieces = output.split('\0');
|
||||
return pieces[0];
|
||||
}
|
||||
|
||||
/** Get the path to the global git config. */
|
||||
export async function getGlobalConfigPath(env?: { HOME: string }): Promise<string | null> {
|
||||
const options = env ? { env } : undefined;
|
||||
const result = await git(
|
||||
['config', '--global', '--list', '--show-origin', '--name-only', '-z'],
|
||||
appDir(),
|
||||
'getGlobalConfigPath',
|
||||
options
|
||||
);
|
||||
const segments = result.output.toString().split('\0');
|
||||
if (segments.length < 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const pathSegment = segments[0];
|
||||
if (!pathSegment.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const path = pathSegment.match(/file:(.+)/i);
|
||||
if (!path || path.length < 2) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return normalize(path[1]);
|
||||
}
|
||||
|
||||
/** Set the local config value by name. */
|
||||
export function setConfigValue(
|
||||
baseDir: string,
|
||||
name: string,
|
||||
value: string,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
return setConfigValueInPath(name, value, baseDir, env);
|
||||
}
|
||||
|
||||
/** Set the global config value by name. */
|
||||
export async function setGlobalConfigValue(
|
||||
name: string,
|
||||
value: string,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
return setConfigValueInPath(name, value, null, env);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set config value by name
|
||||
*
|
||||
* @param path The path to execute the `git` command in. If null
|
||||
* we'll use the global configuration (i.e. --global)
|
||||
* and execute the Git call from the same location that
|
||||
* GitHub Desktop is installed in.
|
||||
*/
|
||||
async function setConfigValueInPath(
|
||||
name: string,
|
||||
value: string,
|
||||
path: string | null,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
const options = env ? { env } : undefined;
|
||||
|
||||
const flags = ['config'];
|
||||
|
||||
if (!path) {
|
||||
flags.push('--global');
|
||||
}
|
||||
|
||||
flags.push('--replace-all', name, value);
|
||||
|
||||
await git(flags, path || appDir(), 'setConfigValueInPath', options);
|
||||
}
|
||||
|
||||
/** Remove the local config value by name. */
|
||||
export async function removeConfigValue(
|
||||
filePath: string,
|
||||
name: string,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
return removeConfigValueInPath(name, filePath, env);
|
||||
}
|
||||
|
||||
/** Remove the global config value by name. */
|
||||
export async function removeGlobalConfigValue(
|
||||
name: string,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
return removeConfigValueInPath(name, null, env);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove config value by name
|
||||
*
|
||||
* @param path The path to execute the `git` command in. If null
|
||||
* we'll use the global configuration (i.e. --global)
|
||||
* and execute the Git call from the same location that
|
||||
* GitHub Desktop is installed in.
|
||||
*/
|
||||
async function removeConfigValueInPath(
|
||||
name: string,
|
||||
path: string | null,
|
||||
env?: {
|
||||
HOME: string;
|
||||
}
|
||||
): Promise<void> {
|
||||
const options = env ? { env } : undefined;
|
||||
|
||||
const flags = ['config'];
|
||||
|
||||
if (!path) {
|
||||
flags.push('--global');
|
||||
}
|
||||
|
||||
flags.push('--unset-all', name);
|
||||
|
||||
await git(flags, path || appDir(), 'removeConfigValueInPath', options);
|
||||
}
|
||||
34
packages/noodl-git/src/core/diff-check.ts
Normal file
34
packages/noodl-git/src/core/diff-check.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { git } from './client';
|
||||
import { getCaptures } from './helpers/regex';
|
||||
|
||||
/**
|
||||
* Returns a list of files with conflict markers present
|
||||
*
|
||||
* @param repositoryPath filepath to repository
|
||||
* @returns filepaths with their number of conflicted markers
|
||||
*/
|
||||
export async function getFilesWithConflictMarkers(repositoryPath: string): Promise<Map<string, number>> {
|
||||
// git operation
|
||||
const args = ['diff', '--check'];
|
||||
const { output } = await git(args, repositoryPath, 'getFilesWithConflictMarkers', {
|
||||
successExitCodes: new Set([0, 2]),
|
||||
spawn: true
|
||||
});
|
||||
|
||||
// result parsing
|
||||
const captures = getCaptures(output, fileNameCaptureRe);
|
||||
if (captures.length === 0) {
|
||||
return new Map<string, number>();
|
||||
}
|
||||
// flatten the list (only does one level deep)
|
||||
const flatCaptures = captures.reduce((acc, val) => acc.concat(val));
|
||||
// count number of occurrences
|
||||
const counted = flatCaptures.reduce((acc, val) => acc.set(val, (acc.get(val) || 0) + 1), new Map<string, number>());
|
||||
return counted;
|
||||
}
|
||||
|
||||
/**
|
||||
* matches a line reporting a leftover conflict marker
|
||||
* and captures the name of the file
|
||||
*/
|
||||
const fileNameCaptureRe = /(.+):\d+: leftover conflict marker/gi;
|
||||
87
packages/noodl-git/src/core/diff-tree.ts
Normal file
87
packages/noodl-git/src/core/diff-tree.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { git } from "./client";
|
||||
import { AppFileStatus, FileChange, FileStatusKind } from "./models/status";
|
||||
|
||||
/**
|
||||
*
|
||||
* $ git show --format="" --name-status -z --no-color c0bfb14
|
||||
*
|
||||
* @param repositoryDir
|
||||
* @param commitish
|
||||
*/
|
||||
export async function getCommitFiles(
|
||||
repositoryDir: string,
|
||||
commitish: string
|
||||
): Promise<readonly FileChange[]> {
|
||||
const args = [
|
||||
"show",
|
||||
"--format=oneline",
|
||||
"--name-status",
|
||||
"-z",
|
||||
"--no-color",
|
||||
commitish,
|
||||
];
|
||||
|
||||
const { output } = await git(args, repositoryDir, "getCommitFiles");
|
||||
const segments = output
|
||||
.toString()
|
||||
.split("\u0000")
|
||||
.filter((s) => s.length > 0);
|
||||
|
||||
const result: FileChange[] = [];
|
||||
|
||||
// Possible status letters are:
|
||||
//
|
||||
// A: addition of a file
|
||||
// C: copy of a file into a new one
|
||||
// D: deletion of a file
|
||||
// M: modification of the contents or mode of a file
|
||||
// R: renaming of a file
|
||||
// T: change in the type of the file (regular file, symbolic link or submodule)
|
||||
// U: file is unmerged (you must complete the merge before it can be committed)
|
||||
// X: "unknown" change type (most probably a bug, please report it)
|
||||
//
|
||||
// TODO: Add support for C, T, U codes
|
||||
//
|
||||
for (let index = 1; index < segments.length; ) {
|
||||
const modifier = segments[index];
|
||||
if (/^[a-z0-9]{40}/.test(modifier)) {
|
||||
index += 1;
|
||||
} else {
|
||||
const filePath = segments[index + 1];
|
||||
|
||||
if (modifier.startsWith("A")) {
|
||||
result.push(
|
||||
new FileChange(filePath, {
|
||||
kind: FileStatusKind.New,
|
||||
})
|
||||
);
|
||||
} else if (modifier.startsWith("M")) {
|
||||
result.push(
|
||||
new FileChange(filePath, {
|
||||
kind: FileStatusKind.Modified,
|
||||
})
|
||||
);
|
||||
} else if (modifier.startsWith("D")) {
|
||||
result.push(
|
||||
new FileChange(filePath, {
|
||||
kind: FileStatusKind.Deleted,
|
||||
})
|
||||
);
|
||||
} else if (modifier.startsWith("R")) {
|
||||
const newFilePath = segments[index + 1];
|
||||
result.push(
|
||||
new FileChange(newFilePath, {
|
||||
kind: FileStatusKind.Renamed,
|
||||
oldPath: filePath,
|
||||
})
|
||||
);
|
||||
index += 1;
|
||||
}
|
||||
|
||||
index += 2;
|
||||
}
|
||||
}
|
||||
|
||||
//Rev ranges might have duplicates, so naively just dedupe based on the path
|
||||
return [...new Map(result.map((r) => [r.path, r])).values()];
|
||||
}
|
||||
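A hedged sketch of calling getCommitFiles defined above; the import path and the commit-ish value are assumptions.

import { getCommitFiles } from './diff-tree';

// List the paths touched by a single commit.
async function printCommitFiles(repositoryDir: string) {
  const files = await getCommitFiles(repositoryDir, 'HEAD');
  files.forEach((file) => console.log(file.path));
}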
229
packages/noodl-git/src/core/diff.ts
Normal file
229
packages/noodl-git/src/core/diff.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import {
|
||||
WorkingDirectoryFileChange,
|
||||
FileChange,
|
||||
FileStatusKind,
|
||||
} from "./models/status";
|
||||
import { IUnrenderableDiff, DiffType, IDiff } from "./models/diff";
|
||||
import { git } from "./client";
|
||||
import { getCaptures } from "./helpers/regex";
|
||||
|
||||
type LineEnding = "CR" | "LF" | "CRLF";
|
||||
export type LineEndingsChange = {
|
||||
from: LineEnding;
|
||||
to: LineEnding;
|
||||
};
|
||||
|
||||
/** Parse the line ending string into an enum value (or `null` if unknown) */
|
||||
export function parseLineEndingText(text: string): LineEnding | null {
|
||||
const input = text.trim();
|
||||
switch (input) {
|
||||
case "CR":
|
||||
return "CR";
|
||||
case "LF":
|
||||
return "LF";
|
||||
case "CRLF":
|
||||
return "CRLF";
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* V8 has a limit on the size of string it can create (~256MB), and unless we want to
|
||||
* trigger an unhandled exception we need to do the encoding conversion by hand.
|
||||
*
|
||||
* This is a hard limit on how big a buffer can be and still be converted into
|
||||
* a string.
|
||||
*/
|
||||
const MaxDiffBufferSize = 70e6; // 70MB in decimal
|
||||
|
||||
/**
|
||||
* Utility function to check whether parsing this buffer is going to cause
|
||||
* issues at runtime.
|
||||
*
|
||||
* @param buffer A buffer of binary text from a spawned process
|
||||
*/
|
||||
function isValidBuffer(buffer: Buffer) {
|
||||
return buffer.length <= MaxDiffBufferSize;
|
||||
}
|
||||
|
||||
function buildDiff(
|
||||
buffer: Buffer,
|
||||
repositoryDir: string,
|
||||
file: FileChange,
|
||||
commitish: string,
|
||||
lineEndingsChange?: LineEndingsChange
|
||||
): Promise<IDiff> {
|
||||
if (!isValidBuffer(buffer)) {
|
||||
// the buffer's diff is too large to be renderable in the UI
|
||||
return Promise.resolve<IUnrenderableDiff>({ kind: DiffType.Unrenderable });
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Render the difference between a file in the given commit and its parent
|
||||
*
|
||||
* @param commitish A commit SHA or some other identifier that ultimately dereferences
|
||||
* to a commit.
|
||||
*/
|
||||
export async function getCommitDiff(
|
||||
repositoryDir: string,
|
||||
file: FileChange,
|
||||
commitish: string,
|
||||
hideWhitespaceInDiff: boolean = false
|
||||
): Promise<IDiff> {
|
||||
const args = [
|
||||
"log",
|
||||
commitish,
|
||||
...(hideWhitespaceInDiff ? ["-w"] : []),
|
||||
"-m",
|
||||
"-1",
|
||||
"--first-parent",
|
||||
"--patch-with-raw",
|
||||
"-z",
|
||||
"--no-color",
|
||||
"--",
|
||||
file.path,
|
||||
];
|
||||
|
||||
if (
|
||||
file.status.kind === FileStatusKind.Renamed ||
|
||||
file.status.kind === FileStatusKind.Copied
|
||||
) {
|
||||
args.push(file.status.oldPath);
|
||||
}
|
||||
|
||||
const { output } = await git(args, repositoryDir, "getCommitDiff");
|
||||
|
||||
return buildDiff(Buffer.from(output), repositoryDir, file, commitish);
|
||||
}
|
||||
|
||||
/**
|
||||
* Render the diff for a file within the repository working directory. The file will be
|
||||
* compared against HEAD if it's tracked, if not it'll be compared to an empty file meaning
|
||||
* that all content in the file will be treated as additions.
|
||||
*/
|
||||
export async function getWorkingDirectoryDiff(
|
||||
repositoryDir: string,
|
||||
file: WorkingDirectoryFileChange,
|
||||
hideWhitespaceInDiff: boolean = false
|
||||
): Promise<IDiff> {
|
||||
// `--no-ext-diff` should be provided wherever we invoke `git diff` so that any
|
||||
// diff.external program configured by the user is ignored
|
||||
const args = [
|
||||
"diff",
|
||||
...(hideWhitespaceInDiff ? ["-w"] : []),
|
||||
"--no-ext-diff",
|
||||
"--patch-with-raw",
|
||||
"-z",
|
||||
"--no-color",
|
||||
];
|
||||
const successExitCodes = new Set([0]);
|
||||
|
||||
if (
|
||||
file.status.kind === FileStatusKind.New ||
|
||||
file.status.kind === FileStatusKind.Untracked
|
||||
) {
|
||||
// `git diff --no-index` seems to emulate the exit codes from `diff` irrespective of
|
||||
// whether you set --exit-code
|
||||
//
|
||||
// this is the behavior:
|
||||
// - 0 if no changes found
|
||||
// - 1 if changes found
|
||||
// - and error otherwise
|
||||
//
|
||||
// citation in source:
|
||||
// https://github.com/git/git/blob/1f66975deb8402131fbf7c14330d0c7cdebaeaa2/diff-no-index.c#L300
|
||||
successExitCodes.add(1);
|
||||
args.push("--no-index", "--", "/dev/null", file.path);
|
||||
} else if (file.status.kind === FileStatusKind.Renamed) {
|
||||
// NB: Technically this is incorrect, the best kind of incorrect.
|
||||
// In order to show exactly what will end up in the commit we should
|
||||
// perform a diff between the new file and the old file as it appears
|
||||
// in HEAD. By diffing against the index we won't show any changes
|
||||
// already staged to the renamed file which differs from our other diffs.
|
||||
// The closest I got to that was running hash-object and then using
|
||||
// git diff <blob> <blob> but that seems a bit excessive.
|
||||
args.push("--", file.path);
|
||||
} else {
|
||||
args.push("HEAD", "--", file.path);
|
||||
}
|
||||
|
||||
const { output, error } = await git(
|
||||
args,
|
||||
repositoryDir,
|
||||
"getWorkingDirectoryDiff",
|
||||
{ successExitCodes }
|
||||
);
|
||||
const lineEndingsChange = parseLineEndingsWarning(error);
|
||||
|
||||
return buildDiff(
|
||||
Buffer.from(output),
|
||||
repositoryDir,
|
||||
file,
|
||||
"HEAD",
|
||||
lineEndingsChange
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* `git diff` will write out messages about the line ending changes it knows
|
||||
* about to `stderr` - this rule here will catch this and also the to/from
|
||||
* changes based on what the user has configured.
|
||||
*/
|
||||
const lineEndingsChangeRegex =
|
||||
/warning: (CRLF|CR|LF) will be replaced by (CRLF|CR|LF) in .*/;
|
||||
|
||||
/**
|
||||
* Utility function for inspecting the stderr output for the line endings
|
||||
* warning that Git may report.
|
||||
*
|
||||
* @param error A buffer of binary text from a spawned process
|
||||
*/
|
||||
function parseLineEndingsWarning(error: string): LineEndingsChange | undefined {
|
||||
if (error.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const match = lineEndingsChangeRegex.exec(error);
|
||||
if (match) {
|
||||
const from = parseLineEndingText(match[1]);
|
||||
const to = parseLineEndingText(match[2]);
|
||||
if (from && to) {
|
||||
return { from, to };
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* List the modified binary files' paths in the given repository
|
||||
*
|
||||
* @param repository to run git operation in
|
||||
* @param ref ref (sha, branch, etc) to compare the working index against
|
||||
*
|
||||
* if you're mid-merge pass `'MERGE_HEAD'` to ref to get a diff of `HEAD` vs `MERGE_HEAD`,
|
||||
* otherwise you should probably pass `'HEAD'` to get a diff of the working tree vs `HEAD`
|
||||
*/
|
||||
export async function getBinaryPaths(
|
||||
repositoryDir: string,
|
||||
ref: string
|
||||
): Promise<ReadonlyArray<string>> {
|
||||
const { output } = await git(
|
||||
["diff", "--numstat", "-z", ref],
|
||||
repositoryDir,
|
||||
"getBinaryPaths"
|
||||
);
|
||||
const captures = getCaptures(output, binaryListRegex);
|
||||
if (captures.length === 0) {
|
||||
return [];
|
||||
}
|
||||
// flatten the list (only does one level deep)
|
||||
const flatCaptures = captures.reduce((acc, val) => acc.concat(val));
|
||||
return flatCaptures;
|
||||
}
|
||||
|
||||
const binaryListRegex = /-\t-\t(?:\0.+\0)?([^\0]*)/gi;
|
||||
30
packages/noodl-git/src/core/errno-exception.ts
Normal file
30
packages/noodl-git/src/core/errno-exception.ts
Normal file
@@ -0,0 +1,30 @@
/**
 * A type describing a specific type of errors thrown by Node.js
 * when encountering errors in low-level operations (such as IO, network,
 * processes) containing additional information related to the error
 * itself.
 */
interface IErrnoException extends Error {
  /**
   * The string name for a numeric error code that comes from a Node.js API.
   * See https://nodejs.org/api/util.html#util_util_getsystemerrorname_err
   */
  readonly code: string;

  /**
   * The "system call" (i.e. the Node abstraction) such as 'spawn', 'open', etc
   * which was responsible for triggering the exception.
   *
   * See https://github.com/nodejs/node/blob/v10.16.0/lib/internal/errors.js#L333-L351
   */
  readonly syscall: string;
}

/**
 * Determine whether the given object conforms to the shape of an
 * internal Node.js low-level exception, see IErrnoException for
 * more details.
 */
export function isErrnoException(err: any): err is IErrnoException {
  return err instanceof Error && typeof (err as any).code === 'string' && typeof (err as any).syscall === 'string';
}
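A minimal sketch of the intended use of isErrnoException: narrowing an unknown error before reading its `code`. The surrounding function is hypothetical.

import { promises as fs } from 'fs';
import { isErrnoException } from './errno-exception';

// Returns null when the file is missing, rethrows anything unexpected.
async function readIfPresent(filePath: string): Promise<string | null> {
  try {
    return await fs.readFile(filePath, 'utf-8');
  } catch (err) {
    if (isErrnoException(err) && err.code === 'ENOENT') {
      return null;
    }
    throw err;
  }
}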
139
packages/noodl-git/src/core/fetch.ts
Normal file
139
packages/noodl-git/src/core/fetch.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { git, GitExecutionOptions, gitNetworkArguments } from "./client";
|
||||
import { IFetchProgress } from "./models/progress";
|
||||
import { FetchProgressParser, executionOptionsWithProgress } from "./progress";
|
||||
import { IRemote } from "./models/remote";
|
||||
import { ITrackingBranch } from "./models/branch";
|
||||
import { IGitResult } from "./git-error";
|
||||
|
||||
async function getFetchArgs(
|
||||
repositoryDir: string,
|
||||
remote: string,
|
||||
progressCallback?: (progress: IFetchProgress) => void
|
||||
) {
|
||||
const networkArguments = await gitNetworkArguments(repositoryDir);
|
||||
|
||||
return progressCallback != null
|
||||
? [...networkArguments, "fetch", "--progress", "--prune", remote]
|
||||
: [...networkArguments, "fetch", "--prune", remote];
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch from the given remote.
|
||||
*
|
||||
* @param repository - The repository to fetch into
|
||||
*
|
||||
* @param remote - The remote to fetch from
|
||||
*
|
||||
* @param progressCallback - An optional function which will be invoked
|
||||
* with information about the current progress
|
||||
* of the fetch operation. When provided this enables
|
||||
* the '--progress' command line flag for
|
||||
* 'git fetch'.
|
||||
*/
|
||||
export async function fetch(
|
||||
repositoryDir: string,
|
||||
remote: IRemote,
|
||||
progressCallback?: (progress: IFetchProgress) => void
|
||||
): Promise<IGitResult> {
|
||||
let opts: GitExecutionOptions = {
|
||||
successExitCodes: new Set([0]),
|
||||
};
|
||||
|
||||
if (progressCallback) {
|
||||
const title = `Fetching ${remote.name}`;
|
||||
const kind = "fetch";
|
||||
|
||||
opts = await executionOptionsWithProgress(
|
||||
{ ...opts },
|
||||
new FetchProgressParser(),
|
||||
(progress) => {
|
||||
// In addition to progress output from the remote end and from
|
||||
// git itself, the stderr output from pull contains information
|
||||
// about ref updates. We don't need to bring those into the progress
|
||||
// stream so we'll just punt on anything we don't know about for now.
|
||||
if (progress.kind === "context") {
|
||||
if (!progress.text.startsWith("remote: Counting objects")) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const description =
|
||||
progress.kind === "progress" ? progress.details.text : progress.text;
|
||||
const value = progress.percent;
|
||||
|
||||
progressCallback({
|
||||
kind,
|
||||
title,
|
||||
description,
|
||||
value,
|
||||
remote: remote.name,
|
||||
});
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
const args = await getFetchArgs(repositoryDir, remote.name, progressCallback);
|
||||
return await git(args, repositoryDir, "fetch", opts);
|
||||
}
|
||||
|
||||
/** Fetch a given refspec from the given remote. */
|
||||
export async function fetchRefspec(
|
||||
repositoryDir: string,
|
||||
remote: IRemote,
|
||||
refspec: string
|
||||
): Promise<void> {
|
||||
const options = {
|
||||
successExitCodes: new Set([0, 128]),
|
||||
env: {},
|
||||
};
|
||||
|
||||
const networkArguments = await gitNetworkArguments(repositoryDir);
|
||||
|
||||
const args = [...networkArguments, "fetch", remote.name, refspec];
|
||||
|
||||
await git(args, repositoryDir, "fetchRefspec", options);
|
||||
}
|
||||
|
||||
export async function fastForwardBranches(
|
||||
repositoryDir: string,
|
||||
branches: ReadonlyArray<ITrackingBranch>
|
||||
): Promise<void> {
|
||||
if (branches.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const refPairs = branches.map(
|
||||
(branch) => `${branch.upstreamRef}:${branch.ref}`
|
||||
);
|
||||
|
||||
const opts: GitExecutionOptions = {
|
||||
// Fetch exits with an exit code of 1 if one or more refs failed to update
|
||||
// which is what we expect will happen
|
||||
successExitCodes: new Set([0, 1]),
|
||||
env: {
|
||||
// This will make sure the reflog entries are correct after
|
||||
// fast-forwarding the branches.
|
||||
GIT_REFLOG_ACTION: "pull",
|
||||
},
|
||||
stdin: refPairs.join("\n"),
|
||||
};
|
||||
|
||||
await git(
|
||||
[
|
||||
"fetch",
|
||||
".",
|
||||
// Make sure we don't try to update branches that can't be fast-forwarded
|
||||
// even if the user disabled this via the git config option
|
||||
// `fetch.showForcedUpdates`
|
||||
"--show-forced-updates",
|
||||
// Prevent `git fetch` from touching the `FETCH_HEAD`
|
||||
"--no-write-fetch-head",
|
||||
// Take branch refs from stdin to circumvent shell max line length
|
||||
// limitations (mainly on Windows)
|
||||
"--stdin",
|
||||
],
|
||||
repositoryDir,
|
||||
"fastForwardBranches",
|
||||
opts
|
||||
);
|
||||
}
|
||||
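A sketch of fetching with progress reporting; the remote object literal assumes IRemote carries at least a `name` and `url`, and the percentage formatting is illustrative.

import { fetch } from './fetch';

async function fetchOrigin(repositoryDir: string) {
  const remote = { name: 'origin', url: 'https://example.com/repo.git' };
  await fetch(repositoryDir, remote, (progress) => {
    // `value` is the parser's 0..1 progress estimate.
    console.log(`${progress.title}: ${Math.round(progress.value * 100)}%`);
  });
}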
222
packages/noodl-git/src/core/for-each-ref.ts
Normal file
222
packages/noodl-git/src/core/for-each-ref.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { git } from "./client";
|
||||
import { GitError } from "dugite";
|
||||
import {
|
||||
Branch,
|
||||
BranchType,
|
||||
IBranchTip,
|
||||
ITrackingBranch,
|
||||
} from "./models/branch";
|
||||
import { CommitIdentity } from "./models/commit-identity";
|
||||
import { createForEachRefParser } from "./git-delimiter-parser";
|
||||
|
||||
/** Get all the branches. */
|
||||
export async function getBranches(
|
||||
repositoryDir: string,
|
||||
...prefixes: string[]
|
||||
): Promise<ReadonlyArray<Branch>> {
|
||||
const { formatArgs, parse } = createForEachRefParser({
|
||||
fullName: "%(refname)",
|
||||
shortName: "%(refname:short)",
|
||||
upstreamShortName: "%(upstream:short)",
|
||||
sha: "%(objectname)",
|
||||
author: "%(author)",
|
||||
symRef: "%(symref)",
|
||||
});
|
||||
|
||||
if (!prefixes || !prefixes.length) {
|
||||
prefixes = ["refs/heads", "refs/remotes"];
|
||||
}
|
||||
|
||||
const result = await git(
|
||||
["for-each-ref", ...formatArgs, ...prefixes],
|
||||
repositoryDir,
|
||||
"getBranches",
|
||||
{
|
||||
expectedErrors: new Set([GitError.NotAGitRepository]),
|
||||
}
|
||||
);
|
||||
|
||||
if (result.gitError === GitError.NotAGitRepository) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const allBranches: Branch[] = [];
|
||||
|
||||
const parsedLines = parse(result.output.toString());
|
||||
for (const ref of parsedLines) {
|
||||
// exclude symbolic refs from the branch list
|
||||
if (ref.symRef.length > 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const author = CommitIdentity.parseIdentity(ref.author);
|
||||
const tip: IBranchTip = { sha: ref.sha, author };
|
||||
|
||||
const type = ref.fullName.startsWith("refs/heads")
|
||||
? BranchType.Local
|
||||
: BranchType.Remote;
|
||||
|
||||
const upstream =
|
||||
ref.upstreamShortName.length > 0 ? ref.upstreamShortName : null;
|
||||
|
||||
allBranches.push(
|
||||
new Branch(ref.shortName, upstream, tip, type, ref.fullName)
|
||||
);
|
||||
}
|
||||
|
||||
// Combine into one array where "Branch.remote" will be the remote branch if it exists.
|
||||
const branches: Branch[] = [];
|
||||
|
||||
for (const branch of allBranches) {
|
||||
const index = branches.findIndex((x) => x.upstream == branch.name);
|
||||
|
||||
if (index !== -1) {
|
||||
branches[index] =
|
||||
branch.type === BranchType.Local
|
||||
? branch.withRemote(branches[index])
|
||||
: branches[index].withRemote(branch);
|
||||
} else {
|
||||
branches.push(branch);
|
||||
}
|
||||
}
|
||||
|
||||
return branches;
|
||||
}
|
||||
|
||||
/** Get all the branches. */
|
||||
export async function getBranchesOld(
|
||||
repositoryDir: string,
|
||||
...prefixes: string[]
|
||||
): Promise<ReadonlyArray<Branch>> {
|
||||
const { formatArgs, parse } = createForEachRefParser({
|
||||
fullName: "%(refname)",
|
||||
shortName: "%(refname:short)",
|
||||
upstreamShortName: "%(upstream:short)",
|
||||
sha: "%(objectname)",
|
||||
author: "%(author)",
|
||||
symRef: "%(symref)",
|
||||
});
|
||||
|
||||
if (!prefixes || !prefixes.length) {
|
||||
prefixes = ["refs/heads", "refs/remotes"];
|
||||
}
|
||||
|
||||
const result = await git(
|
||||
["for-each-ref", ...formatArgs, ...prefixes],
|
||||
repositoryDir,
|
||||
"getBranches",
|
||||
{
|
||||
expectedErrors: new Set([GitError.NotAGitRepository]),
|
||||
}
|
||||
);
|
||||
|
||||
if (result.gitError === GitError.NotAGitRepository) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const allBranches: Branch[] = [];
|
||||
|
||||
const parsedLines = parse(result.output.toString());
|
||||
for (const ref of parsedLines) {
|
||||
// exclude symbolic refs from the branch list
|
||||
if (ref.symRef.length > 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const author = CommitIdentity.parseIdentity(ref.author);
|
||||
const tip: IBranchTip = { sha: ref.sha, author };
|
||||
|
||||
const type = ref.fullName.startsWith("refs/heads")
|
||||
? BranchType.Local
|
||||
: BranchType.Remote;
|
||||
|
||||
const upstream =
|
||||
ref.upstreamShortName.length > 0 ? ref.upstreamShortName : null;
|
||||
|
||||
allBranches.push(
|
||||
new Branch(ref.shortName, upstream, tip, type, ref.fullName)
|
||||
);
|
||||
}
|
||||
|
||||
return allBranches;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all branches that differ from their upstream (i.e. they're ahead,
|
||||
* behind or both), excluding the current branch.
|
||||
* Useful to narrow down a list of branches that could potentially be fast
|
||||
* forwarded.
|
||||
*
|
||||
* @param repositoryDir Repository to get the branches from.
|
||||
*/
|
||||
export async function getBranchesDifferingFromUpstream(
|
||||
repositoryDir: string
|
||||
): Promise<ReadonlyArray<ITrackingBranch>> {
|
||||
const { formatArgs, parse } = createForEachRefParser({
|
||||
fullName: "%(refname)",
|
||||
sha: "%(objectname)", // SHA
|
||||
upstream: "%(upstream)",
|
||||
symref: "%(symref)",
|
||||
head: "%(HEAD)",
|
||||
});
|
||||
|
||||
const prefixes = ["refs/heads", "refs/remotes"];
|
||||
|
||||
const result = await git(
|
||||
["for-each-ref", ...formatArgs, ...prefixes],
|
||||
repositoryDir,
|
||||
"getBranchesDifferingFromUpstream",
|
||||
{ expectedErrors: new Set([GitError.NotAGitRepository]) }
|
||||
);
|
||||
|
||||
if (result.gitError === GitError.NotAGitRepository) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const localBranches = [];
|
||||
const remoteBranchShas = new Map<string, string>();
|
||||
|
||||
// First we need to collect the relevant info from the command output:
|
||||
// - For local branches with upstream: name, ref, SHA and the upstream.
|
||||
// - For remote branches we only need the sha (and the ref as key).
|
||||
for (const ref of parse(result.output.toString())) {
|
||||
if (ref.symref.length > 0 || ref.head === "*") {
|
||||
// Exclude symbolic refs and the current branch
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ref.fullName.startsWith("refs/heads")) {
|
||||
if (ref.upstream.length === 0) {
|
||||
// Exclude local branches without upstream
|
||||
continue;
|
||||
}
|
||||
|
||||
localBranches.push({
|
||||
ref: ref.fullName,
|
||||
sha: ref.sha,
|
||||
upstream: ref.upstream,
|
||||
});
|
||||
} else {
|
||||
remoteBranchShas.set(ref.fullName, ref.sha);
|
||||
}
|
||||
}
|
||||
|
||||
const eligibleBranches = new Array<ITrackingBranch>();
|
||||
|
||||
// Compare the SHA of every local branch with the SHA of its upstream and
|
||||
// collect the names of local branches that differ from their upstream.
|
||||
for (const branch of localBranches) {
|
||||
const remoteSha = remoteBranchShas.get(branch.upstream);
|
||||
|
||||
if (remoteSha !== undefined && remoteSha !== branch.sha) {
|
||||
eligibleBranches.push({
|
||||
ref: branch.ref,
|
||||
sha: branch.sha,
|
||||
upstreamRef: branch.upstream,
|
||||
upstreamSha: remoteSha,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return eligibleBranches;
|
||||
}
|
||||
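A sketch combining getBranchesDifferingFromUpstream with fastForwardBranches from fetch.ts; the composition is illustrative, not part of this commit.

import { getBranchesDifferingFromUpstream } from './for-each-ref';
import { fastForwardBranches } from './fetch';

// Fast-forward every local branch that lags behind its upstream.
async function fastForwardStaleBranches(repositoryDir: string) {
  const stale = await getBranchesDifferingFromUpstream(repositoryDir);
  await fastForwardBranches(repositoryDir, stale);
}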
93
packages/noodl-git/src/core/git-delimiter-parser.ts
Normal file
93
packages/noodl-git/src/core/git-delimiter-parser.ts
Normal file
@@ -0,0 +1,93 @@
/**
 * Create a new parser suitable for parsing --format output from commands such
 * as `git log`, `git stash`, and other commands that are not derived from
 * `ref-filter`.
 *
 * Returns an object with the arguments that need to be appended to the git
 * call and the parse function itself
 *
 * @param fields An object keyed on the friendly name of the value being
 *               parsed with the value being the format string of said value.
 *
 *               Example:
 *
 *               `const { formatArgs, parse } = createLogParser({ sha: '%H' })`
 */
export function createLogParser<T extends Record<string, string>>(fields: T) {
  const keys: Array<keyof T> = Object.keys(fields);
  const format = Object.values(fields).join("%x00");
  const formatArgs = ["-z", `--format=${format}`];

  const parse = (value: string) => {
    const records = value.split("\0");
    const entries = [];

    for (let i = 0; i < records.length - keys.length; i += keys.length) {
      const entry = {} as { [K in keyof T]: string };
      keys.forEach((key, ix) => (entry[key] = records[i + ix]));
      entries.push(entry);
    }

    return entries;
  };

  return { formatArgs, parse };
}

/**
 * Create a new parser suitable for parsing --format output from commands such
 * as `git for-each-ref`, `git branch`, and other commands that are not derived
 * from `git log`.
 *
 * Returns an object with the arguments that need to be appended to the git
 * call and the parse function itself
 *
 * @param fields An object keyed on the friendly name of the value being
 *               parsed with the value being the format string of said value.
 *
 *               Example:
 *
 *               `const { formatArgs, parse } = createForEachRefParser({ sha: '%(objectname)' })`
 */
export function createForEachRefParser<T extends Record<string, string>>(
  fields: T
) {
  const keys: Array<keyof T> = Object.keys(fields);
  const format = Object.values(fields).join("%00");
  const formatArgs = [`--format=%00${format}%00`];

  const parse = (value: string) => {
    const records = value.split("\0");
    const entries = new Array<{ [K in keyof T]: string }>();

    let entry;
    let consumed = 0;

    // start at 1 to avoid 0 modulo X problem. The first record is guaranteed
    // to be empty anyway (due to %00 at the start of --format)
    for (let i = 1; i < records.length - 1; i++) {
      if (i % (keys.length + 1) === 0) {
        if (records[i] !== "\n") {
          throw new Error("Expected newline");
        }
        continue;
      }

      entry = entry ?? ({} as { [K in keyof T]: string });
      const key = keys[consumed % keys.length];
      entry[key] = records[i];
      consumed++;

      if (consumed % keys.length === 0) {
        entries.push(entry);
        entry = undefined;
      }
    }

    return entries;
  };

  return { formatArgs, parse };
}
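A sketch of wiring createLogParser into a `git log` call, following the pattern used elsewhere in this package; the field names and the calling function are assumptions.

import { git } from './client';
import { createLogParser } from './git-delimiter-parser';

// Returns e.g. [{ sha: '...', summary: '...' }, ...] for the last ten commits.
async function listRecentCommits(repositoryDir: string) {
  const { formatArgs, parse } = createLogParser({ sha: '%H', summary: '%s' });
  const { output } = await git(
    ['log', '--max-count=10', ...formatArgs, '--'],
    repositoryDir,
    'listRecentCommits'
  );
  return parse(output.toString());
}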
222
packages/noodl-git/src/core/git-error.ts
Normal file
222
packages/noodl-git/src/core/git-error.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { GitError as DugiteError } from "dugite";
|
||||
|
||||
/**
|
||||
* Returns the SHA of the passed in IGitResult
|
||||
*/
|
||||
export function parseCommitSHA(result: IGitResult): string {
|
||||
return result.output.toString().split("]")[0].split(" ")[1];
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of using `git`.
|
||||
*/
|
||||
export interface IGitResult {
|
||||
/** The contents of stdout received from the spawned process */
|
||||
readonly output: string;
|
||||
|
||||
/** The contents of stderr received from the spawned process */
|
||||
readonly error: string;
|
||||
|
||||
/** The exit code returned by the spawned process */
|
||||
readonly exitCode: number | null;
|
||||
|
||||
/**
|
||||
* The parsed git error. This will be null when the exit code is included in
|
||||
* the `successExitCodes`, or when dugite was unable to parse the
|
||||
* error.
|
||||
*/
|
||||
readonly gitError: DugiteError | null;
|
||||
|
||||
/** The human-readable error description, based on `gitError`. */
|
||||
readonly gitErrorDescription: string | null;
|
||||
|
||||
/**
|
||||
* The path that the Git command was executed from, i.e. the
|
||||
* process working directory (not to be confused with the Git
|
||||
* working directory which is... super confusing, I know)
|
||||
*/
|
||||
readonly path: string;
|
||||
}
|
||||
|
||||
export class GitError extends Error {
|
||||
/** The result from the failed command. */
|
||||
public readonly result: IGitResult;
|
||||
|
||||
/** The args for the failed command. */
|
||||
public readonly args: ReadonlyArray<string>;
|
||||
|
||||
/**
|
||||
* Whether or not the error message is just the raw output of the git command.
|
||||
*/
|
||||
public readonly isRawMessage: boolean;
|
||||
|
||||
public constructor(result: IGitResult, args: ReadonlyArray<string>) {
|
||||
let rawMessage = true;
|
||||
let message: string;
|
||||
|
||||
if (result.gitErrorDescription) {
|
||||
message = result.gitErrorDescription;
|
||||
rawMessage = false;
|
||||
} else if (result.error.length) {
|
||||
message = result.error.toString();
|
||||
} else if (result.output.length) {
|
||||
message = result.output.toString();
|
||||
} else {
|
||||
message = "Unknown error";
|
||||
rawMessage = false;
|
||||
}
|
||||
|
||||
super(message);
|
||||
|
||||
this.name = "GitError";
|
||||
this.result = result;
|
||||
this.args = args;
|
||||
this.isRawMessage = rawMessage;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether the provided `error` is an authentication failure
|
||||
* as per our definition. Note that this is not an exhaustive list of
|
||||
* authentication failures, only a collection of errors that we treat
|
||||
* equally in terms of error message and presentation to the user.
|
||||
*/
|
||||
export function isAuthFailureError(
|
||||
error: DugiteError
|
||||
): error is
|
||||
| DugiteError.SSHAuthenticationFailed
|
||||
| DugiteError.SSHPermissionDenied
|
||||
| DugiteError.HTTPSAuthenticationFailed {
|
||||
switch (error) {
|
||||
case DugiteError.SSHAuthenticationFailed:
|
||||
case DugiteError.SSHPermissionDenied:
|
||||
case DugiteError.HTTPSAuthenticationFailed:
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function getDescriptionForError(error: DugiteError): string | null {
|
||||
if (isAuthFailureError(error)) {
|
||||
return `Authentication failed. Some common reasons include:
|
||||
|
||||
- You are not logged in to your account
|
||||
- You may need to log out and log back in to refresh your token.
|
||||
- You do not have permission to access this repository.
|
||||
- The repository is archived on GitHub. Check the repository settings to confirm you are still permitted to push commits.
|
||||
- If you use SSH authentication, check that your key is added to the ssh-agent and associated with your account.
|
||||
- If you use SSH authentication, ensure the host key verification passes for your repository hosting service.
|
||||
- If you used username / password authentication, you might need to use a Personal Access Token instead of your account password. Check the documentation of your repository hosting service.`;
|
||||
}
|
||||
|
||||
switch (error) {
|
||||
case DugiteError.SSHKeyAuditUnverified:
|
||||
return "The SSH key is unverified.";
|
||||
case DugiteError.RemoteDisconnection:
|
||||
return "The remote disconnected. Check your Internet connection and try again.";
|
||||
case DugiteError.HostDown:
|
||||
return "The host is down. Check your Internet connection and try again.";
|
||||
case DugiteError.RebaseConflicts:
|
||||
return "We found some conflicts while trying to rebase. Please resolve the conflicts before continuing.";
|
||||
case DugiteError.MergeConflicts:
|
||||
return "We found some conflicts while trying to merge. Please resolve the conflicts and commit the changes.";
|
||||
case DugiteError.HTTPSRepositoryNotFound:
|
||||
case DugiteError.SSHRepositoryNotFound:
|
||||
return "The repository does not seem to exist anymore. You may not have access, or it may have been deleted or renamed.";
|
||||
case DugiteError.PushNotFastForward:
|
||||
return "The repository has been updated since you last pulled. Try pulling before pushing.";
|
||||
case DugiteError.BranchDeletionFailed:
|
||||
return "Could not delete the branch. It was probably already deleted.";
|
||||
case DugiteError.DefaultBranchDeletionFailed:
|
||||
return `The branch is the repository's default branch and cannot be deleted.`;
|
||||
case DugiteError.RevertConflicts:
|
||||
return "To finish reverting, please merge and commit the changes.";
|
||||
case DugiteError.EmptyRebasePatch:
|
||||
return "There aren’t any changes left to apply.";
|
||||
case DugiteError.NoMatchingRemoteBranch:
|
||||
return "There aren’t any remote branches that match the current branch.";
|
||||
case DugiteError.NothingToCommit:
|
||||
return "There are no changes to commit.";
|
||||
case DugiteError.NoSubmoduleMapping:
|
||||
return "A submodule was removed from .gitmodules, but the folder still exists in the repository. Delete the folder, commit the change, then try again.";
|
||||
case DugiteError.SubmoduleRepositoryDoesNotExist:
|
||||
return "A submodule points to a location which does not exist.";
|
||||
case DugiteError.InvalidSubmoduleSHA:
|
||||
return "A submodule points to a commit which does not exist.";
|
||||
case DugiteError.LocalPermissionDenied:
|
||||
return "Permission denied.";
|
||||
case DugiteError.InvalidMerge:
|
||||
return "This is not something we can merge.";
|
||||
case DugiteError.InvalidRebase:
|
||||
return "This is not something we can rebase.";
|
||||
case DugiteError.NonFastForwardMergeIntoEmptyHead:
|
||||
return "The merge you attempted is not a fast-forward, so it cannot be performed on an empty branch.";
|
||||
case DugiteError.PatchDoesNotApply:
|
||||
return "The requested changes conflict with one or more files in the repository.";
|
||||
case DugiteError.BranchAlreadyExists:
|
||||
return "A branch with that name already exists.";
|
||||
case DugiteError.BadRevision:
|
||||
return "Bad revision.";
|
||||
case DugiteError.NotAGitRepository:
|
||||
return "This is not a git repository.";
|
||||
case DugiteError.ProtectedBranchForcePush:
|
||||
return "This branch is protected from force-push operations.";
|
||||
case DugiteError.ProtectedBranchRequiresReview:
|
||||
return "This branch is protected and any changes requires an approved review. Open a pull request with changes targeting this branch instead.";
|
||||
case DugiteError.PushWithFileSizeExceedingLimit:
|
||||
return "The push operation includes a file which exceeds GitHub's file size restriction of 100MB. Please remove the file from history and try again.";
|
||||
case DugiteError.HexBranchNameRejected:
|
||||
return "The branch name cannot be a 40-character string of hexadecimal characters, as this is the format that Git uses for representing objects.";
|
||||
case DugiteError.ForcePushRejected:
|
||||
return "The force push has been rejected for the current branch.";
|
||||
case DugiteError.InvalidRefLength:
|
||||
return "A ref cannot be longer than 255 characters.";
|
||||
case DugiteError.CannotMergeUnrelatedHistories:
|
||||
return "Unable to merge unrelated histories in this repository.";
|
||||
case DugiteError.PushWithPrivateEmail:
|
||||
return 'Cannot push these commits as they contain an email address marked as private on GitHub. To push anyway, visit https://github.com/settings/emails, uncheck "Keep my email address private", then switch back to GitHub Desktop to push your commits. You can then enable the setting again.';
|
||||
case DugiteError.LFSAttributeDoesNotMatch:
|
||||
return "Git LFS attribute found in global Git configuration does not match expected value.";
|
||||
case DugiteError.ProtectedBranchDeleteRejected:
|
||||
return "This branch cannot be deleted from the remote repository because it is marked as protected.";
|
||||
case DugiteError.ProtectedBranchRequiredStatus:
|
||||
return "The push was rejected by the remote server because a required status check has not been satisfied.";
|
||||
case DugiteError.BranchRenameFailed:
|
||||
return "The branch could not be renamed.";
|
||||
case DugiteError.PathDoesNotExist:
|
||||
return "The path does not exist on disk.";
|
||||
case DugiteError.InvalidObjectName:
|
||||
return "The object was not found in the Git repository.";
|
||||
case DugiteError.OutsideRepository:
|
||||
return "This path is not a valid path inside the repository.";
|
||||
case DugiteError.LockFileAlreadyExists:
|
||||
return "A lock file already exists in the repository, which blocks this operation from completing.";
|
||||
case DugiteError.NoMergeToAbort:
|
||||
return "There is no merge in progress, so there is nothing to abort.";
|
||||
case DugiteError.NoExistingRemoteBranch:
|
||||
return "The remote branch does not exist.";
|
||||
case DugiteError.LocalChangesOverwritten:
|
||||
return "Unable to switch branches as there are working directory changes which would be overwritten. Please commit or stash your changes.";
|
||||
case DugiteError.UnresolvedConflicts:
|
||||
return "There are unresolved conflicts in the working directory.";
|
||||
case DugiteError.ConfigLockFileAlreadyExists:
|
||||
// Added in dugite 1.88.0 (https://github.com/desktop/dugite/pull/386)
|
||||
// in support of https://github.com/desktop/desktop/issues/8675 but we're
|
||||
// not using it yet. Returning a null message here means the stderr will
|
||||
// be used as the error message (or stdout if stderr is empty), i.e. the
|
||||
// same behavior as before the ConfigLockFileAlreadyExists was added
|
||||
return null;
|
||||
case DugiteError.RemoteAlreadyExists:
|
||||
return null;
|
||||
case DugiteError.TagAlreadyExists:
|
||||
return "A tag with that name already exists";
|
||||
case DugiteError.MergeWithLocalChanges:
|
||||
case DugiteError.RebaseWithLocalChanges:
|
||||
case DugiteError.GPGFailedToSignData:
|
||||
case DugiteError.ConflictModifyDeletedInBranch:
|
||||
case DugiteError.MergeCommitNoMainlineOption:
|
||||
return null;
|
||||
default:
|
||||
throw new Error(`Unknown error: ${error}`);
|
||||
}
|
||||
}
|
||||
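A sketch of turning a thrown GitError into a user-facing string with getDescriptionForError; the fallback handling is illustrative.

import { GitError, getDescriptionForError } from './git-error';

function describeGitFailure(err: unknown): string {
  if (err instanceof GitError && err.result.gitError !== null) {
    return getDescriptionForError(err.result.gitError) ?? err.message;
  }
  return err instanceof Error ? err.message : 'Unknown git failure';
}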
8
packages/noodl-git/src/core/helpers/object.ts
Normal file
8
packages/noodl-git/src/core/helpers/object.ts
Normal file
@@ -0,0 +1,8 @@
/** Create a copy of an object by merging it with a subset of its properties. */
export function merge<T, K extends keyof T>(obj: T, subset: Pick<T, K>): T {
  const copy = Object.assign({}, obj);
  for (const k in subset) {
    copy[k] = subset[k];
  }
  return copy;
}
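A tiny usage sketch for merge(); the branch-like object is made up for illustration.

import { merge } from './helpers/object';

const base = { name: 'main', ahead: 0, behind: 0 };
const updated = merge(base, { ahead: 2 }); // { name: 'main', ahead: 2, behind: 0 }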
88
packages/noodl-git/src/core/helpers/regex.ts
Normal file
88
packages/noodl-git/src/core/helpers/regex.ts
Normal file
@@ -0,0 +1,88 @@
import matchAll from 'string.prototype.matchall';

/**
 * Get all regex captures within a body of text
 *
 * @param text string to search
 * @param re regex to search with. must have global option and one capture
 *
 * @returns arrays of strings captured by supplied regex
 */
export function getCaptures(text: string, re: RegExp): ReadonlyArray<Array<string>> {
  const matches = getMatches(text, re);
  const captures = matches.reduce((acc, match) => acc.concat([match.slice(1)]), new Array<Array<string>>());
  return captures;
}

/**
 * Get all regex matches within a body of text
 *
 * @param text string to search
 * @param re regex to search with. must have global option
 * @returns set of strings captured by supplied regex
 */
export function getMatches(text: string, re: RegExp): Array<RegExpExecArray> {
  if (re.global === false) {
    throw new Error(
      'A regex has been provided that is not marked as global, and has the potential to execute forever if it finds a match'
    );
  }

  const matches = new Array<RegExpExecArray>();
  let match = re.exec(text);

  while (match !== null) {
    matches.push(match);
    match = re.exec(text);
  }
  return matches;
}

/**
 * Replaces characters that have a semantic meaning inside of a regexp with
 * their escaped equivalent (i.e. `*` becomes `\*` etc).
 *
 * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping
 */
export function escapeRegExp(expression: string) {
  return expression.replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&');
}

/*
 * Looks for the phrases "remote: error: File " and " is (a file size, e.g. 106.5 MB); this exceeds GitHub's file size limit of 100.00 MB"
 * inside of a string containing errors and returns an array of all the filenames and their sizes located between these two strings.
 *
 * example return [ "LargeFile.exe (150.00 MB)", "AlsoTooLargeOfAFile.txt (1.00 GB)" ]
 */
export function getFileFromExceedsError(error: string): string[] {
  const endRegex = /(;\sthis\sexceeds\sGitHub's\sfile\ssize\slimit\sof\s100.00\sMB)/gm;
  const beginRegex = /(^remote:\serror:\sFile\s)/gm;
  const beginMatches = Array.from(matchAll(error, beginRegex));
  const endMatches = Array.from(matchAll(error, endRegex));

  // Something went wrong and we didn't find the same amount of endings as we did beginnings
  // Just return an empty array as the output we'd give would look weird anyway
  if (beginMatches.length !== endMatches.length) {
    return [];
  }

  const files: string[] = [];

  for (let index = 0; index < beginMatches.length; index++) {
    const beginMatch = beginMatches[index];
    const endMatch = endMatches[index];

    if (beginMatch.index === undefined || endMatch.index === undefined) {
      continue;
    }

    const from = beginMatch.index + beginMatch[0].length;
    const to = endMatch.index;
    let file = error.slice(from, to);
    file = file.replace('is ', '(');
    file += ')';
    files.push(file);
  }

  return files;
}
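A sketch showing the shape getCaptures returns for a global regex with one capture group; the sample input mirrors the `git diff --check` output parsed in diff-check.ts.

import { getCaptures } from './helpers/regex';

const sample = 'src/a.ts:12: leftover conflict marker\nsrc/b.ts:4: leftover conflict marker';
const captured = getCaptures(sample, /(.+):\d+: leftover conflict marker/gi);
// captured is [['src/a.ts'], ['src/b.ts']]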
16
packages/noodl-git/src/core/helpers/remove-remote-prefix.ts
Normal file
16
packages/noodl-git/src/core/helpers/remove-remote-prefix.ts
Normal file
@@ -0,0 +1,16 @@
/**
 * Remove the remote prefix from the string. If there is no prefix, returns
 * null. E.g.:
 *
 * origin/my-branch       -> my-branch
 * origin/thing/my-branch -> thing/my-branch
 * my-branch              -> null
 */
export function removeRemotePrefix(name: string): string | null {
  const pieces = name.match(/.*?\/(.*)/);
  if (!pieces || pieces.length < 2) {
    return null;
  }

  return pieces[1];
}
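The docblock's examples, spelled out as a sketch:

import { removeRemotePrefix } from './helpers/remove-remote-prefix';

removeRemotePrefix('origin/my-branch');       // 'my-branch'
removeRemotePrefix('origin/thing/my-branch'); // 'thing/my-branch'
removeRemotePrefix('my-branch');              // null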
19
packages/noodl-git/src/core/ignore.ts
Normal file
19
packages/noodl-git/src/core/ignore.ts
Normal file
@@ -0,0 +1,19 @@
import path from 'path';
import fs from 'fs';

export async function appendGitIgnore(repositoryDir: string, newItems: string[]) {
  const gitIgnorePath = path.join(repositoryDir, '.gitignore');
  const content = fs.existsSync(gitIgnorePath) ? await fs.promises.readFile(gitIgnorePath, { encoding: 'utf-8' }) : '';
  const lineByLine = content.split('\n').map((x) => x.trim());

  const newContent: string[] = [];
  newItems.forEach((text) => {
    if (!lineByLine.includes(text)) {
      newContent.push(text);
    }
  });

  if (newContent.length > 0) {
    await fs.promises.appendFile(gitIgnorePath, newContent.join('\r\n') + '\r\n');
  }
}
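A sketch of appendGitIgnore in use; the entries are examples, not the ones Noodl actually writes.

import { appendGitIgnore } from './ignore';

async function ignoreCommonArtifacts(repositoryDir: string) {
  await appendGitIgnore(repositoryDir, ['node_modules', '.DS_Store']);
}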
34
packages/noodl-git/src/core/init.ts
Normal file
34
packages/noodl-git/src/core/init.ts
Normal file
@@ -0,0 +1,34 @@
import path from 'path';
import { git } from './client';
import { setConfigValue } from './config';
import { DEFAULT_BRANCH } from '../constants';

export async function installMergeDriver(repositoryDir: string) {
  const driverPath = process.env.devMode
    ? `"${path.join(process.cwd(), 'electron')}"` + ' ' + `"${process.cwd()}"`
    : `"${process.env.exePath}"`;

  const driver = `${driverPath} --merge %O %A %B %L`;
  await setConfigValue(
    repositoryDir,
    'merge.noodl.name',
    'Merge driver installed by Noodl to handle merge conflicts in project.json file.'
  );
  await setConfigValue(repositoryDir, 'merge.noodl.driver', driver);
}

export interface InitOptions {
  bare?: boolean;
}

export async function init(repositoryDir: string, options?: InitOptions): Promise<string> {
  const args = ['-c', `init.defaultBranch=${DEFAULT_BRANCH}`, 'init'];

  if (options?.bare) {
    args.push('--bare');
  }

  await git(args, repositoryDir, 'init');

  return repositoryDir;
}
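A sketch of composing init with installMergeDriver when creating a new project repository; the composition is illustrative only.

import { init, installMergeDriver } from './init';

async function createProjectRepository(repositoryDir: string) {
  await init(repositoryDir);
  await installMergeDriver(repositoryDir);
  return repositoryDir;
}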
292
packages/noodl-git/src/core/logs.ts
Normal file
292
packages/noodl-git/src/core/logs.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { git } from "./client";
|
||||
import {
|
||||
CommittedFileChange,
|
||||
FileStatusKind,
|
||||
PlainFileStatus,
|
||||
CopiedOrRenamedFileStatus,
|
||||
UntrackedFileStatus,
|
||||
} from "./models/status";
|
||||
import { Commit } from "./models/snapshot";
|
||||
import { CommitIdentity } from "./models/commit-identity";
|
||||
import { getCaptures } from "./helpers/regex";
|
||||
import { createLogParser } from "./git-delimiter-parser";
|
||||
import { revRange } from "./rev-list";
|
||||
|
||||
/**
|
||||
* Map the raw status text from Git to an app-friendly value
|
||||
* shamelessly borrowed from GitHub Desktop (Windows)
|
||||
*/
|
||||
function mapStatus(
|
||||
rawStatus: string,
|
||||
oldPath?: string
|
||||
): PlainFileStatus | CopiedOrRenamedFileStatus | UntrackedFileStatus {
|
||||
const status = rawStatus.trim();
|
||||
|
||||
if (status === "M") {
|
||||
return { kind: FileStatusKind.Modified };
|
||||
} // modified
|
||||
if (status === "A") {
|
||||
return { kind: FileStatusKind.New };
|
||||
} // added
|
||||
if (status === "?") {
|
||||
return { kind: FileStatusKind.Untracked };
|
||||
} // untracked
|
||||
if (status === "D") {
|
||||
return { kind: FileStatusKind.Deleted };
|
||||
} // deleted
|
||||
if (status === "R" && oldPath != null) {
|
||||
return { kind: FileStatusKind.Renamed, oldPath };
|
||||
} // renamed
|
||||
if (status === "C" && oldPath != null) {
|
||||
return { kind: FileStatusKind.Copied, oldPath };
|
||||
} // copied
|
||||
|
||||
// git log -M --name-status will return a RXXX - where XXX is a percentage
|
||||
if (status.match(/R[0-9]+/) && oldPath != null) {
|
||||
return { kind: FileStatusKind.Renamed, oldPath };
|
||||
}
|
||||
|
||||
// git log -C --name-status will return a CXXX - where XXX is a percentage
|
||||
if (status.match(/C[0-9]+/) && oldPath != null) {
|
||||
return { kind: FileStatusKind.Copied, oldPath };
|
||||
}
|
||||
|
||||
return { kind: FileStatusKind.Modified };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the repository's commits using `revisionRange` and limited to `limit`
|
||||
*/
|
||||
export async function getCommits(
|
||||
repositoryDir: string,
|
||||
revisionRange?: string,
|
||||
limit?: number,
|
||||
skip?: number,
|
||||
additionalArgs: ReadonlyArray<string> = []
|
||||
): Promise<ReadonlyArray<Commit>> {
|
||||
const { formatArgs, parse } = createLogParser({
|
||||
sha: "%H", // SHA
|
||||
shortSha: "%h", // short SHA
|
||||
summary: "%s", // summary
|
||||
body: "%b", // body
|
||||
// author identity string, matching format of GIT_AUTHOR_IDENT.
|
||||
// author name <author email> <author date>
|
||||
// author date format dependent on --date arg, should be raw
|
||||
author: "%an <%ae> %ad",
|
||||
committer: "%cn <%ce> %cd",
|
||||
parents: "%P", // parent SHAs,
|
||||
// trailers: '%(trailers:unfold,only)',
|
||||
refs: "%D",
|
||||
});
|
||||
|
||||
const args = ["log"];
|
||||
|
||||
if (revisionRange !== undefined) {
|
||||
args.push(revisionRange);
|
||||
}
|
||||
|
||||
args.push("--date=raw");
|
||||
|
||||
if (limit !== undefined) {
|
||||
args.push(`--max-count=${limit}`);
|
||||
}
|
||||
|
||||
if (skip !== undefined) {
|
||||
args.push(`--skip=${skip}`);
|
||||
}
|
||||
|
||||
args.push(
|
||||
...formatArgs,
|
||||
"--no-show-signature",
|
||||
"--no-color",
|
||||
...additionalArgs,
|
||||
"--"
|
||||
);
|
||||
const { exitCode, output } = await git(args, repositoryDir, "getCommits", {
|
||||
successExitCodes: new Set([0, 128]),
|
||||
});
|
||||
|
||||
// if the repository has an unborn HEAD, return an empty history of commits
|
||||
if (exitCode === 128) {
|
||||
return new Array<Commit>();
|
||||
}
|
||||
|
||||
const parsed = parse(output.toString());
|
||||
|
||||
return parsed.map((commit) => {
|
||||
const tags = getCaptures(commit.refs, /tag: ([^\s,]+)/g)
|
||||
.filter((i) => i[0] !== undefined)
|
||||
.map((i) => i[0]);
|
||||
|
||||
return new Commit(
|
||||
repositoryDir,
|
||||
commit.sha,
|
||||
commit.shortSha,
|
||||
commit.summary,
|
||||
commit.body,
|
||||
CommitIdentity.parseIdentity(commit.author),
|
||||
CommitIdentity.parseIdentity(commit.committer),
|
||||
commit.parents.length > 0 ? commit.parents.split(" ") : [],
|
||||
tags
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/** This interface contains information of a changeset. */
|
||||
export interface IChangesetData {
|
||||
/** Files changed in the changeset. */
|
||||
readonly files: ReadonlyArray<CommittedFileChange>;
|
||||
|
||||
/** Number of lines added in the changeset. */
|
||||
readonly linesAdded: number;
|
||||
|
||||
/** Number of lines deleted in the changeset. */
|
||||
readonly linesDeleted: number;
|
||||
}
|
||||
|
||||
/** Get the files that were changed in the given commit. */
|
||||
export async function getChangedFiles(
|
||||
repositoryDir: string,
|
||||
sha: string
|
||||
): Promise<IChangesetData> {
|
||||
// opt-in for rename detection (-M) and copies detection (-C)
|
||||
// this is equivalent to the user configuring 'diff.renames' to 'copies'
|
||||
// NOTE: order here matters - doing -M before -C means copies aren't detected
|
||||
const baseArgs = [
|
||||
"log",
|
||||
sha,
|
||||
"-C",
|
||||
"-M",
|
||||
"-m",
|
||||
"-1",
|
||||
"--no-show-signature",
|
||||
"--first-parent",
|
||||
"--format=format:",
|
||||
"-z",
|
||||
];
|
||||
|
||||
// Run `git log` to obtain the file names and their state
|
||||
const resultNameStatus = await git(
|
||||
[...baseArgs, "--name-status", "--"],
|
||||
repositoryDir,
|
||||
"getChangedFilesNameStatus"
|
||||
);
|
||||
|
||||
const files = parseChangedFiles(resultNameStatus.output.toString(), sha);
|
||||
|
||||
// Run `git log` again, but this time to get the number of lines added/deleted
|
||||
// per file
|
||||
const resultNumStat = await git(
|
||||
[...baseArgs, "--numstat", "--"],
|
||||
repositoryDir,
|
||||
"getChangedFilesNumStats"
|
||||
);
|
||||
|
||||
const linesChanged = parseChangedFilesNumStat(
|
||||
resultNumStat.output.toString()
|
||||
);
|
||||
|
||||
return {
|
||||
files,
|
||||
...linesChanged,
|
||||
};
|
||||
}
|
||||
|
||||
function parseChangedFilesNumStat(stdout: string): {
|
||||
linesAdded: number;
|
||||
linesDeleted: number;
|
||||
} {
|
||||
const lines = stdout.split("\0");
|
||||
let totalLinesAdded = 0;
|
||||
let totalLinesDeleted = 0;
|
||||
|
||||
for (const line of lines) {
|
||||
const parts = line.split("\t");
|
||||
if (parts.length !== 3) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const [added, deleted] = parts;
|
||||
|
||||
if (added === "-" || deleted === "-") {
|
||||
continue;
|
||||
}
|
||||
|
||||
totalLinesAdded += parseInt(added, 10);
|
||||
totalLinesDeleted += parseInt(deleted, 10);
|
||||
}
|
||||
|
||||
return { linesAdded: totalLinesAdded, linesDeleted: totalLinesDeleted };
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses git `log` or `diff` output into a list of changed files
|
||||
* (see `getChangedFiles` for an example of use)
|
||||
*
|
||||
* @param stdout raw output from a git `-z` and `--name-status` flags
|
||||
* @param committish commitish command was run against
|
||||
*/
|
||||
export function parseChangedFiles(
|
||||
stdout: string,
|
||||
committish: string
|
||||
): ReadonlyArray<CommittedFileChange> {
|
||||
const lines = stdout.split("\0");
|
||||
// Remove the trailing empty line
|
||||
lines.splice(-1, 1);
|
||||
const files: CommittedFileChange[] = [];
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const statusText = lines[i];
|
||||
|
||||
let oldPath: string | undefined = undefined;
|
||||
|
||||
if (
|
||||
statusText.length > 0 &&
|
||||
(statusText[0] === "R" || statusText[0] === "C")
|
||||
) {
|
||||
oldPath = lines[++i];
|
||||
}
|
||||
|
||||
const status = mapStatus(statusText, oldPath);
|
||||
|
||||
const path = lines[++i];
|
||||
|
||||
files.push(new CommittedFileChange(path, status, committish));
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
/** Get the commit for the given ref. */
|
||||
export async function getCommit(
|
||||
repositoryDir: string,
|
||||
ref: string
|
||||
): Promise<Commit | null> {
|
||||
const commits = await getCommits(repositoryDir, ref, 1);
|
||||
if (commits.length < 1) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return commits[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if merge commits exist in history after given commit
|
||||
* If commitRef is null, goes back to the HEAD of the branch.
|
||||
*/
|
||||
export async function doMergeCommitsExistAfterCommit(
|
||||
repositoryDir: string,
|
||||
commitRef: string | null
|
||||
): Promise<boolean> {
|
||||
const commitRevRange =
|
||||
commitRef === null ? undefined : revRange(commitRef, "HEAD");
|
||||
|
||||
const mergeCommits = await getCommits(
|
||||
repositoryDir,
|
||||
commitRevRange,
|
||||
undefined,
|
||||
undefined,
|
||||
["--merges"]
|
||||
);
|
||||
|
||||
return mergeCommits.length > 0;
|
||||
}
|
||||
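A sketch of reading recent history with getCommits and getChangedFiles; it assumes the Commit model exposes a `sha` field, which the constructor call above suggests.

import { getCommits, getChangedFiles } from './logs';

async function summarizeLatestCommit(repositoryDir: string) {
  const commits = await getCommits(repositoryDir, 'HEAD', 10);
  if (commits.length === 0) {
    return;
  }
  const { files, linesAdded, linesDeleted } = await getChangedFiles(repositoryDir, commits[0].sha);
  console.log(`${files.length} files changed, +${linesAdded} -${linesDeleted}`);
}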
193
packages/noodl-git/src/core/merge-tree-parser.ts
Normal file
193
packages/noodl-git/src/core/merge-tree-parser.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
import { IMergeTreeEntry, MergeTreeResult } from './models/merge';
|
||||
import { ComputedAction } from './models/computed-action';
|
||||
|
||||
interface IBlobSource {
|
||||
readonly type: string;
|
||||
readonly path: string;
|
||||
readonly sha: string;
|
||||
readonly mode: string;
|
||||
}
|
||||
|
||||
function updateCurrentMergeEntry(
|
||||
entry: IMergeTreeEntry | undefined,
|
||||
context: string,
|
||||
blobSource: IBlobSource
|
||||
): IMergeTreeEntry {
|
||||
const currentMergeEntry = entry || {
|
||||
context,
|
||||
diff: ''
|
||||
};
|
||||
|
||||
const blob = {
|
||||
sha: blobSource.sha,
|
||||
mode: blobSource.mode,
|
||||
path: blobSource.path
|
||||
};
|
||||
|
||||
switch (blobSource.type) {
|
||||
case 'base':
|
||||
return {
|
||||
...currentMergeEntry,
|
||||
base: blob
|
||||
};
|
||||
case 'result':
|
||||
return {
|
||||
...currentMergeEntry,
|
||||
result: blob
|
||||
};
|
||||
case 'our':
|
||||
return {
|
||||
...currentMergeEntry,
|
||||
our: blob
|
||||
};
|
||||
case 'their':
|
||||
return {
|
||||
...currentMergeEntry,
|
||||
their: blob
|
||||
};
|
||||
default:
|
||||
return currentMergeEntry;
|
||||
}
|
||||
}
|
||||
|
||||
// the merge-tree output is a collection of entries like this
|
||||
//
|
||||
// changed in both
|
||||
// base 100644 f69fbc5c40409a1db7a3f8353bfffe46a21d6054 atom/browser/resources/mac/Info.plist
|
||||
// our 100644 9094f0f7335edf833d51f688851e6a105de60433 atom/browser/resources/mac/Info.plist
|
||||
// their 100644 2dd8bc646cff3869557549a39477e30022e6cfdd atom/browser/resources/mac/Info.plist
|
||||
// @@ -17,9 +17,15 @@
|
||||
// <key>CFBundleIconFile</key>
|
||||
// <string>electron.icns</string>
|
||||
// <key>CFBundleVersion</key>
|
||||
// +<<<<<<< .our
|
||||
// <string>4.0.0</string>
|
||||
// <key>CFBundleShortVersionString</key>
|
||||
// <string>4.0.0</string>
|
||||
// +=======
|
||||
// + <string>1.4.16</string>
|
||||
// + <key>CFBundleShortVersionString</key>
|
||||
// + <string>1.4.16</string>
|
||||
// +>>>>>>> .their
|
||||
// <key>LSApplicationCategoryType</key>
|
||||
//<string>public.app-category.developer-tools</string>
|
||||
// <key>LSMinimumSystemVersion</key>
|
||||
|
||||
// The first line for each entry is what I'm referring to as the header
|
||||
// This regex filters on the known entries that can appear
|
||||
const contextHeaderRe = /^(merged|added in remote|removed in remote|changed in both|removed in local|added in both)$/;
|
||||
|
||||
// the rest of the header is made up of a number of entries formatted like this
|
||||
//
|
||||
// base 100644 f69fbc5c40409a1db7a3f8353bfffe46a21d6054 atom/browser/resources/mac/Info.plist
|
||||
//
|
||||
// this regex lets us extract the blob details - the filename may also change
|
||||
// as part of the merge if files are moved or renamed
|
||||
const blobEntryRe = /^\s{2}(result|our|their|base)\s+(\d{6})\s([0-9a-f]{40})\s(.+)$/;
|
||||
|
||||
/**
|
||||
* Parse the Git output of a merge-tree command to identify whether it
|
||||
* has detected any conflicts between the branches to be merged
|
||||
*
|
||||
* @param text the stdout from a `git merge-tree` command
|
||||
*
|
||||
*/
|
||||
export function parseMergeTreeResult(text: string): MergeTreeResult {
|
||||
const entries = new Array<IMergeTreeEntry>();
|
||||
|
||||
const lines = text.split('\n');
|
||||
|
||||
let mergeEntryHeader: string | undefined;
|
||||
let currentMergeEntry: IMergeTreeEntry | undefined;
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const headerMatch = contextHeaderRe.exec(line);
|
||||
if (headerMatch != null) {
|
||||
mergeEntryHeader = headerMatch[1];
|
||||
|
||||
// push the previous entry, if defined, into the array
|
||||
if (currentMergeEntry != null) {
|
||||
entries.push(currentMergeEntry);
|
||||
currentMergeEntry = undefined;
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// the next lines are a number of merge result entries
|
||||
// pointing to blobs representing the source blob
|
||||
// and the resulting blob generated by the merge
|
||||
const blobMatch = blobEntryRe.exec(line);
|
||||
if (blobMatch != null) {
|
||||
const type = blobMatch[1];
|
||||
const mode = blobMatch[2];
|
||||
const sha = blobMatch[3];
|
||||
const path = blobMatch[4];
|
||||
|
||||
const blob = {
|
||||
type,
|
||||
mode,
|
||||
sha,
|
||||
path
|
||||
};
|
||||
|
||||
if (mergeEntryHeader == null) {
|
||||
console.warn(`An unknown header was set while trying to parse the blob on line ${i}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
switch (type) {
|
||||
case 'base':
|
||||
case 'result':
|
||||
case 'our':
|
||||
case 'their':
|
||||
currentMergeEntry = updateCurrentMergeEntry(currentMergeEntry, mergeEntryHeader, blob);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`invalid state - unexpected entry ${type} found when parsing rows`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (currentMergeEntry == null) {
|
||||
throw new Error(`invalid state - trying to append the diff to a merge entry that isn't defined on line ${i}`);
|
||||
} else {
|
||||
const currentDiff = currentMergeEntry.diff;
|
||||
const newDiff = currentDiff + line + '\n';
|
||||
currentMergeEntry = {
|
||||
...currentMergeEntry,
|
||||
diff: newDiff
|
||||
};
|
||||
|
||||
const lineHasConflictMarker =
|
||||
line.startsWith('+<<<<<<<') || line.startsWith('+=======') || line.startsWith('+>>>>>>>');
|
||||
|
||||
if (lineHasConflictMarker) {
|
||||
currentMergeEntry = {
|
||||
...currentMergeEntry,
|
||||
hasConflicts: true
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ensure the last entry is pushed onto the array
|
||||
if (currentMergeEntry != null) {
|
||||
entries.push(currentMergeEntry);
|
||||
currentMergeEntry = undefined;
|
||||
}
|
||||
|
||||
const entriesWithConflicts = entries.filter((e) => e.hasConflicts || false);
|
||||
|
||||
if (entriesWithConflicts.length > 0) {
|
||||
return {
|
||||
kind: ComputedAction.Conflicts,
|
||||
conflictedFiles: entriesWithConflicts.length,
|
||||
conflictedEntries: entriesWithConflicts
|
||||
};
|
||||
} else {
|
||||
return { kind: ComputedAction.Clean, entries };
|
||||
}
|
||||
}
|
||||
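A minimal usage sketch for the parser above; the sample merge-tree output below is fabricated for illustration and the relative import paths are assumptions about where the calling code lives.

// Usage sketch (sample output and import paths are assumptions).
import { parseMergeTreeResult } from './merge-tree-parser';
import { ComputedAction } from './models/computed-action';

const sampleOutput = [
  'changed in both',
  '  base   100644 f69fbc5c40409a1db7a3f8353bfffe46a21d6054 Info.plist',
  '  our    100644 9094f0f7335edf833d51f688851e6a105de60433 Info.plist',
  '  their  100644 2dd8bc646cff3869557549a39477e30022e6cfdd Info.plist',
  '@@ -17,9 +17,15 @@',
  '+<<<<<<< .our',
  '+=======',
  '+>>>>>>> .their'
].join('\n');

const result = parseMergeTreeResult(sampleOutput);
if (result.kind === ComputedAction.Conflicts) {
  // The discriminated union narrows here, so conflictedFiles is available.
  console.log(`conflicts detected in ${result.conflictedFiles} file(s)`);
}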
206
packages/noodl-git/src/core/merge.ts
Normal file
@@ -0,0 +1,206 @@
import fs from 'fs';
import * as Path from 'path';

import { git } from './client';
import { GitError } from 'dugite';
import { Branch } from './models/branch';
import { MergeTreeResult } from './models/merge';
import { ComputedAction } from './models/computed-action';
import { parseMergeTreeResult } from './merge-tree-parser';

export enum MergeResult {
  /** The merge completed successfully */
  Success,
  /**
   * The merge was a noop since the current branch
   * was already up to date with the target branch.
   */
  AlreadyUpToDate,
  /**
   * The merge failed, likely due to conflicts.
   */
  Failed
}

/** Merge the named branch into the current branch. */
export async function merge(
  repositoryDir: string,
  branch: string,
  options: {
    strategy?: string;
    strategyOption?: string;
    isSquash?: boolean;
    squashNoCommit?: boolean;
    message?: string;
    noFastForward?: boolean;
  }
): Promise<MergeResult> {
  const args = ['merge'];

  if (options.message) {
    args.push('-m', options.message);
  }

  if (options.isSquash) {
    args.push('--squash');
  } else if (options.noFastForward) {
    // No Fast Forward, meaning that there will always be a merge commit.
    args.push('--no-ff');
  }

  if (options.strategy) {
    args.push('--strategy', options.strategy);
  }

  if (options.strategyOption) {
    args.push('--strategy-option', options.strategyOption);
  }

  args.push(branch);

  const { exitCode, output } = await git(args, repositoryDir, 'merge', {
    expectedErrors: new Set([GitError.MergeConflicts])
  });

  if (exitCode !== 0) {
    return MergeResult.Failed;
  }

  if (options.isSquash && !options.squashNoCommit) {
    const squashArgs = ['commit', '--no-edit'];

    if (options.message) {
      squashArgs.push('-m', options.message);
    }

    const { exitCode } = await git(squashArgs, repositoryDir, 'merge');
    if (exitCode !== 0) {
      return MergeResult.Failed;
    }
  }

  return output.toString() === noopMergeMessage ? MergeResult.AlreadyUpToDate : MergeResult.Success;
}

const noopMergeMessage = 'Already up to date.\n';

/**
 * Find the base commit between two commit-ish identifiers
 *
 * @returns the commit id of the merge base, or null if the two commit-ish
 *          identifiers do not have a common base
 */
export async function getMergeBase(
  repositoryDir: string,
  firstCommitish: string,
  secondCommitish: string
): Promise<string | null> {
  const process = await git(['merge-base', firstCommitish, secondCommitish], repositoryDir, 'merge-base', {
    // - 1 is returned if a common ancestor cannot be resolved
    // - 128 is returned if a ref cannot be found
    //   "warning: ignoring broken ref refs/remotes/origin/main."
    successExitCodes: new Set([0, 1, 128])
  });

  if (process.exitCode === 1 || process.exitCode === 128) {
    return null;
  }

  return process.output.toString().trim();
}

/**
 * Generate the merge result from two branches in a repository
 *
 * @param repository The repository containing the branches to merge
 * @param ours The current branch
 * @param theirs Another branch to merge into the current branch
 */
export async function mergeTree(repositoryDir: string, ours: Branch, theirs: Branch): Promise<MergeTreeResult | null> {
  const mergeBase = await getMergeBase(repositoryDir, ours.tip.sha, theirs.tip.sha);

  if (mergeBase === null) {
    return { kind: ComputedAction.Invalid };
  }

  if (mergeBase === ours.tip.sha || mergeBase === theirs.tip.sha) {
    return { kind: ComputedAction.Clean, entries: [] };
  }

  const result = await git(['merge-tree', mergeBase, ours.tip.sha, theirs.tip.sha], repositoryDir, 'mergeTree');

  const output = result.output.toString();

  if (output.length === 0) {
    // the merge commit will be empty - this is fine!
    return { kind: ComputedAction.Clean, entries: [] };
  }

  return parseMergeTreeResult(output);
}

export async function mergeTreeCommit(
  repositoryDir: string,
  oursCommitish: string,
  theirsCommitish: string
): Promise<MergeTreeResult | null> {
  const mergeBase = await getMergeBase(repositoryDir, oursCommitish, theirsCommitish);

  if (mergeBase === null) {
    return { kind: ComputedAction.Invalid };
  }

  if (mergeBase === oursCommitish || mergeBase === theirsCommitish) {
    return { kind: ComputedAction.Clean, entries: [] };
  }

  const result = await git(['merge-tree', mergeBase, oursCommitish, theirsCommitish], repositoryDir, 'mergeTree');

  const output = result.output.toString();

  if (output.length === 0) {
    // the merge commit will be empty - this is fine!
    return { kind: ComputedAction.Clean, entries: [] };
  }

  return parseMergeTreeResult(output);
}

/**
 * Abort a mid-flight (conflicted) merge
 *
 * @param repository where to abort the merge
 */
export async function abortMerge(repositoryDir: string): Promise<void> {
  await git(['merge', '--abort'], repositoryDir, 'abortMerge');
}

/**
 * Check the `.git/MERGE_HEAD` file exists in a repository to confirm
 * that it is in a conflicted state.
 */
export async function isMergeHeadSet(repositoryDir: string): Promise<boolean> {
  const path = Path.join(repositoryDir, '.git', 'MERGE_HEAD');

  // NOTE: access calls reject, which causes issues with devtools.
  return new Promise<boolean>((resolve) => {
    resolve(fs.existsSync(path));
  });
}

/**
 * Check the `.git/SQUASH_MSG` file exists in a repository.
 * This would indicate we did a merge --squash and have not committed, indicating
 * we have detected a conflict.
 *
 * Note: If we abort the merge, this doesn't get cleared automatically which
 * could lead to this being erroneously available in a non merge --squashing scenario.
 */
export async function isSquashMsgSet(repositoryDir: string): Promise<boolean> {
  const path = Path.join(repositoryDir, '.git', 'SQUASH_MSG');

  // NOTE: access calls reject, which causes issues with devtools.
  return new Promise<boolean>((resolve) => {
    resolve(fs.existsSync(path));
  });
}
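A usage sketch for the merge helpers above; the repository path and branch name are assumptions for illustration, not values used anywhere in the package.

// Usage sketch (repository path and branch name are assumptions).
import { merge, MergeResult, abortMerge, isMergeHeadSet } from './merge';

async function mergeFeature(repositoryDir: string) {
  const result = await merge(repositoryDir, 'feature/my-branch', { noFastForward: true });

  if (result === MergeResult.Failed && (await isMergeHeadSet(repositoryDir))) {
    // A conflicted merge is in progress; back out of it.
    await abortMerge(repositoryDir);
  }

  return result;
}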
143
packages/noodl-git/src/core/models/branch.ts
Normal file
@@ -0,0 +1,143 @@
import { Commit } from "./snapshot";
import { removeRemotePrefix } from "../helpers/remove-remote-prefix";
import { CommitIdentity } from "./commit-identity";

// NOTE: The values here matter as they are used to sort
// local and remote branches, Local should come before Remote
export enum BranchType {
  Local = 0,
  Remote = 1,
}

/** The number of commits a revision range is ahead/behind. */
export interface IAheadBehind {
  readonly ahead: number;
  readonly behind: number;
}

/** The result of comparing two refs in a repository. */
export interface ICompareResult extends IAheadBehind {
  readonly commits: ReadonlyArray<Commit>;
}

/** Basic data about a branch, and the branch it's tracking. */
export interface ITrackingBranch {
  readonly ref: string;
  readonly sha: string;
  readonly upstreamRef: string;
  readonly upstreamSha: string;
}

/** Basic data about the latest commit on the branch. */
export interface IBranchTip {
  readonly sha: string;
  readonly author: CommitIdentity;
}

/** Default rules for where to create a branch from */
export enum StartPoint {
  CurrentBranch = "CurrentBranch",
  DefaultBranch = "DefaultBranch",
  Head = "Head",
  /** Only valid for forks */
  UpstreamDefaultBranch = "UpstreamDefaultBranch",
}

/** A branch as loaded from Git. */
export class Branch {
  /**
   * A branch as loaded from Git.
   *
   * @param name The short name of the branch. E.g., `main`.
   * @param upstream The remote-prefixed upstream name. E.g., `origin/main`.
   * @param tip Basic information (sha and author) of the latest commit on the branch.
   * @param type The type of branch, e.g., local or remote.
   * @param ref The canonical ref of the branch
   */
  public constructor(
    public readonly name: string,
    public readonly upstream: string | null,
    public readonly tip: IBranchTip,
    public readonly type: BranchType,
    public readonly ref: string,
    public readonly remote?: Branch
  ) {}

  public withRemote(remote: Branch) {
    return new Branch(
      this.name,
      this.upstream,
      this.tip,
      this.type,
      this.ref,
      remote
    );
  }

  /**
   * This might be a little confusing, git has different refs.
   *
   * If the ref is "refs/remotes" then there will never be an upstream.
   * If the ref is "refs/heads" it might have an upstream if it is remote.
   *
   * The "remote" variable here only means that there is a remote branch created,
   * but it might not be in sync with the remote branch.
   */
  public get isLocal(): boolean {
    return !Boolean(this.upstream) && this.type === BranchType.Local;
  }

  /** The name of the upstream's remote. */
  public get upstreamRemoteName(): string | null {
    const upstream = this.upstream;
    if (!upstream) {
      return null;
    }

    const pieces = upstream.match(/(.*?)\/.*/);
    if (!pieces || pieces.length < 2) {
      return null;
    }

    return pieces[1];
  }

  /** The name of the remote for a remote branch. If local, will return null. */
  public get remoteName(): string | null {
    if (this.type === BranchType.Local) {
      return null;
    }

    const pieces = this.ref.match(/^refs\/remotes\/(.*?)\/.*/);
    if (!pieces || pieces.length !== 2) {
      // This shouldn't happen, the remote ref should always be prefixed
      // with refs/remotes
      throw new Error(`Remote branch ref has unexpected format: ${this.ref}`);
    }
    return pieces[1];
  }

  /**
   * The name of the branch's upstream without the remote prefix.
   */
  public get upstreamWithoutRemote(): string | null {
    if (!this.upstream) {
      return null;
    }

    return removeRemotePrefix(this.upstream);
  }

  /**
   * The name of the branch without the remote prefix. If the branch is a local
   * branch, this is the same as its `name`.
   */
  public get nameWithoutRemote(): string {
    if (this.type === BranchType.Local) {
      return this.name;
    } else {
      const withoutRemote = removeRemotePrefix(this.name);
      return withoutRemote || this.name;
    }
  }
}
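An illustrative sketch of the Branch getters above; the tip data is fabricated, and the expected output of nameWithoutRemote assumes removeRemotePrefix strips the leading remote segment.

// Illustrative only; tip data is fabricated and removeRemotePrefix behavior is assumed.
import { Branch, BranchType } from './branch';
import { CommitIdentity } from './commit-identity';

const tip = {
  sha: '0000000000000000000000000000000000000000',
  author: new CommitIdentity('Jane Doe', 'jane@example.com', new Date(), 0)
};

const remoteBranch = new Branch('origin/main', null, tip, BranchType.Remote, 'refs/remotes/origin/main');

console.log(remoteBranch.remoteName);        // "origin" (parsed from the refs/remotes ref)
console.log(remoteBranch.nameWithoutRemote); // "main" (assuming the remote prefix is stripped)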
9
packages/noodl-git/src/core/models/clone-options.ts
Normal file
@@ -0,0 +1,9 @@
/** Additional arguments to provide when cloning a repository */
export type CloneOptions = {
  /** The branch to checkout after the clone has completed. */
  readonly branch?: string;
  /** The default branch name in case we're cloning an empty repository. */
  readonly defaultBranch?: string;
  /** Clone a single branch; this is just for testing. */
  readonly singleBranch?: boolean;
};
58
packages/noodl-git/src/core/models/commit-identity.ts
Normal file
@@ -0,0 +1,58 @@
/**
 * A tuple of name, email, and date for the author or commit
 * info in a commit.
 */
export class CommitIdentity {
  /**
   * Parses a Git ident string (GIT_AUTHOR_IDENT or GIT_COMMITTER_IDENT)
   * into a commit identity. Throws an error if the identity string is invalid.
   */
  public static parseIdentity(identity: string): CommitIdentity {
    // See fmt_ident in ident.c:
    //  https://github.com/git/git/blob/3ef7618e6/ident.c#L346
    //
    // Format is "NAME <EMAIL> DATE"
    //  Markus Olsson <j.markus.olsson@gmail.com> 1475670580 +0200
    //
    // Note that `git var` will strip any < and > from the name and email, see:
    //  https://github.com/git/git/blob/3ef7618e6/ident.c#L396
    //
    // Note also that this expects a date formatted with the RAW option in git see:
    //  https://github.com/git/git/blob/35f6318d4/date.c#L191
    //
    const m = identity.match(/^(.*?) <(.*?)> (\d+) (\+|-)?(\d{2})(\d{2})/);
    if (!m) {
      throw new Error(`Couldn't parse identity ${identity}`);
    }

    const name = m[1];
    const email = m[2];
    // The date is specified as seconds from the epoch,
    // Date() expects milliseconds since the epoch.
    const date = new Date(parseInt(m[3], 10) * 1000);

    if (isNaN(date.valueOf())) {
      throw new Error(`Couldn't parse identity ${identity}, invalid date`);
    }

    // The RAW option never uses alphanumeric timezone identifiers and in my
    // testing I've never found it to omit the leading + for a positive offset
    // but the docs for strprintf seem to suggest it might on some systems so
    // we're playing it safe.
    const tzSign = m[4] === '-' ? '-' : '+';
    const tzHH = m[5];
    const tzmm = m[6];

    const tzMinutes = parseInt(tzHH, 10) * 60 + parseInt(tzmm, 10);
    const tzOffset = tzMinutes * (tzSign === '-' ? -1 : 1);

    return new CommitIdentity(name, email, date, tzOffset);
  }

  public constructor(
    public readonly name: string,
    public readonly email: string,
    public readonly date: Date,
    public readonly tzOffset: number = new Date().getTimezoneOffset()
  ) {}
}
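A short sketch of parseIdentity applied to the ident format documented in the comment above.

// Sketch: parsing the sample ident string shown in the comment above.
import { CommitIdentity } from './commit-identity';

const ident = CommitIdentity.parseIdentity(
  'Markus Olsson <j.markus.olsson@gmail.com> 1475670580 +0200'
);

console.log(ident.name);     // "Markus Olsson"
console.log(ident.email);    // "j.markus.olsson@gmail.com"
console.log(ident.tzOffset); // 120 (minutes east of UTC)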
13
packages/noodl-git/src/core/models/computed-action.ts
Normal file
@@ -0,0 +1,13 @@
/**
 * An action being computed in the background on behalf of the user
 */
export enum ComputedAction {
  /** The action is being computed in the background */
  Loading = 'loading',
  /** The action should complete without any additional work required by the user */
  Clean = 'clean',
  /** The action requires additional work by the user to complete successfully */
  Conflicts = 'conflicts',
  /** The action cannot be completed, for reasons the app should explain */
  Invalid = 'invalid'
}
58
packages/noodl-git/src/core/models/diff-data.ts
Normal file
@@ -0,0 +1,58 @@
import { FileChange, getMediaType } from './status';

export enum DiffType {
  /** Changes to a text file, which may be partially selected for commit */
  Text,
  /** Changes to a file with a known extension, which can be viewed in the app */
  Image,
  /** Changes to an unknown file format, which Git is unable to present in a human-friendly format */
  Binary,
  /** Change to a repository which is included as a submodule of this repository */
  Submodule,
  /** Diff that will not be rendered */
  Unrenderable
}

export interface ITextDiff {
  readonly kind: DiffType.Text;
  readonly modified?: string;
  readonly original: string;
}

export class Image {
  public get dataSource(): string {
    return `data:${this.mediaType};base64,${this.contents}`;
  }

  /**
   * @param contents The base64 encoded contents of the image.
   * @param mediaType The data URI media type, so the browser can render the image correctly.
   * @param bytes Size of the file in bytes.
   */
  public constructor(
    public readonly contents: string,
    public readonly mediaType: string,
    public readonly bytes: number
  ) {}

  public static fromBinary(file: FileChange, buffer: Buffer) {
    return new Image(buffer.toString('base64'), getMediaType(file.extension), buffer.length);
  }
}

export interface IImageDiff {
  readonly kind: DiffType.Image;
  readonly modified?: Image;
  readonly original: Image;
}

export interface IBinaryDiff {
  readonly kind: DiffType.Binary;
}

export interface IUnrenderableDiff {
  readonly kind: DiffType.Unrenderable;
}

/** The union of diff types */
export type IDiff = ITextDiff | IImageDiff | IBinaryDiff | IUnrenderableDiff;
41
packages/noodl-git/src/core/models/diff-line.ts
Normal file
@@ -0,0 +1,41 @@
/** indicate what a line in the diff represents */
export enum DiffLineType {
  Context,
  Add,
  Delete,
  Hunk
}

/** track details related to each line in the diff */
export class DiffLine {
  public constructor(
    public readonly text: string,
    public readonly type: DiffLineType,
    // Line number in the original diff patch (before expanding it), or null if
    // it was added as part of a diff expansion action.
    public readonly originalLineNumber: number | null,
    public readonly oldLineNumber: number | null,
    public readonly newLineNumber: number | null,
    public readonly noTrailingNewLine: boolean = false
  ) {}

  public withNoTrailingNewLine(noTrailingNewLine: boolean): DiffLine {
    return new DiffLine(
      this.text,
      this.type,
      this.originalLineNumber,
      this.oldLineNumber,
      this.newLineNumber,
      noTrailingNewLine
    );
  }

  public isIncludeableLine() {
    return this.type === DiffLineType.Add || this.type === DiffLineType.Delete;
  }

  /** The content of the line, i.e., without the line type marker. */
  public get content(): string {
    return this.text.substr(1);
  }
}
242
packages/noodl-git/src/core/models/diff-selection.ts
Normal file
@@ -0,0 +1,242 @@
/**
 * The state of a file's diff selection
 */
export enum DiffSelectionType {
  /** The entire file should be committed */
  All = 'All',
  /** A subset of lines in the file have been selected for committing */
  Partial = 'Partial',
  /** The file should be excluded from committing */
  None = 'None'
}

/**
 * Utility function which determines whether a boolean selection state
 * matches the given DiffSelectionType. A true selection state matches
 * DiffSelectionType.All, a false selection state matches
 * DiffSelectionType.None and if the selection type is partial there's
 * never a match.
 */
function typeMatchesSelection(selectionType: DiffSelectionType, selected: boolean): boolean {
  switch (selectionType) {
    case DiffSelectionType.All:
      return selected;
    case DiffSelectionType.None:
      return !selected;
    case DiffSelectionType.Partial:
      return false;
    default:
      throw new Error(`Unknown selection type ${selectionType}`);
  }
}

/**
 * An immutable, efficient, storage object for tracking selections of indexable
 * lines. While general purpose by design this is currently used exclusively for
 * tracking selected lines in modified files in the working directory.
 *
 * This class starts out with an initial (or default) selection state, i.e.
 * either all lines are selected by default or no lines are selected by default.
 *
 * The selection can then be transformed by marking a line or a range of lines
 * as selected or not selected. Internally the class maintains a list of lines
 * whose selection state has diverged from the default selection state.
 */
export class DiffSelection {
  /**
   * Initialize a new selection instance where either all lines are selected by default
   * or no lines are selected by default.
   */
  public static fromInitialSelection(initialSelection: DiffSelectionType.All | DiffSelectionType.None): DiffSelection {
    if (initialSelection !== DiffSelectionType.All && initialSelection !== DiffSelectionType.None) {
      throw new Error('Can only instantiate a DiffSelection with All or None as the initial selection');
    }

    return new DiffSelection(initialSelection, null, null);
  }

  /**
   * @param divergingLines Any line numbers where the selection differs from the default state.
   * @param selectableLines Optional set of line numbers which can be selected.
   */
  private constructor(
    private readonly defaultSelectionType: DiffSelectionType.All | DiffSelectionType.None,
    private readonly divergingLines: Set<number> | null = null,
    private readonly selectableLines: Set<number> | null = null
  ) {}

  /** Returns a value indicating the computed overall state of the selection */
  public getSelectionType(): DiffSelectionType {
    const divergingLines = this.divergingLines;
    const selectableLines = this.selectableLines;

    // No diverging lines, happy path. Either all lines are selected or none are.
    if (!divergingLines) {
      return this.defaultSelectionType;
    }
    if (divergingLines.size === 0) {
      return this.defaultSelectionType;
    }

    // If we know which lines are selectable we need to check that
    // all lines are divergent and return the inverse of default selection.
    // To avoid looping through the set that often our happy path is
    // if there's a size mismatch.
    if (selectableLines && selectableLines.size === divergingLines.size) {
      const allSelectableLinesAreDivergent = [...selectableLines].every((i) => divergingLines.has(i));

      if (allSelectableLinesAreDivergent) {
        return this.defaultSelectionType === DiffSelectionType.All ? DiffSelectionType.None : DiffSelectionType.All;
      }
    }

    // Note that without any selectable lines we'll report partial selection
    // as long as we have any diverging lines since we have no way of knowing
    // if _all_ lines are divergent or not
    return DiffSelectionType.Partial;
  }

  /** Returns a value indicating whether the given line number is selected or not */
  public isSelected(lineIndex: number): boolean {
    const lineIsDivergent = !!this.divergingLines && this.divergingLines.has(lineIndex);

    if (this.defaultSelectionType === DiffSelectionType.All) {
      return !lineIsDivergent;
    } else if (this.defaultSelectionType === DiffSelectionType.None) {
      return lineIsDivergent;
    } else {
      throw new Error(`Unknown base selection type ${this.defaultSelectionType}`);
    }
  }

  /**
   * Returns a value indicating whether the given line number is selectable.
   * A line not being selectable usually means it's a hunk header or a context
   * line.
   */
  public isSelectable(lineIndex: number): boolean {
    return this.selectableLines ? this.selectableLines.has(lineIndex) : true;
  }

  /**
   * Returns a copy of this selection instance with the provided
   * line selection update.
   *
   * @param lineIndex The index (line number) of the line which should
   *                  be selected or unselected.
   *
   * @param selected  Whether the given line number should be marked
   *                  as selected or not.
   */
  public withLineSelection(lineIndex: number, selected: boolean): DiffSelection {
    return this.withRangeSelection(lineIndex, 1, selected);
  }

  /**
   * Returns a copy of this selection instance with the provided
   * line selection update. This is similar to the withLineSelection
   * method except that it allows updating the selection state of
   * a range of lines at once. Use this if you ever need to modify
   * the selection state of more than one line at a time as it's
   * more efficient.
   *
   * @param from     The line index (inclusive) from where to start
   *                 updating the line selection state.
   *
   * @param to       The number of lines for which to update the
   *                 selection state. A value of zero means no lines
   *                 are updated and a value of 1 means only the
   *                 line given by lineIndex will be updated.
   *
   * @param selected Whether the lines should be marked as selected
   *                 or not.
   */
  // Lower inclusive, upper exclusive. Same as substring
  public withRangeSelection(from: number, length: number, selected: boolean): DiffSelection {
    const computedSelectionType = this.getSelectionType();
    const to = from + length;

    // Nothing for us to do here. This state is when all lines are already
    // selected and we're being asked to select more or when no lines are
    // selected and we're being asked to unselect something.
    if (typeMatchesSelection(computedSelectionType, selected)) {
      return this;
    }

    if (computedSelectionType === DiffSelectionType.Partial) {
      const newDivergingLines = new Set<number>(this.divergingLines!);

      if (typeMatchesSelection(this.defaultSelectionType, selected)) {
        for (let i = from; i < to; i++) {
          newDivergingLines.delete(i);
        }
      } else {
        for (let i = from; i < to; i++) {
          // Ensure it's selectable
          if (this.isSelectable(i)) {
            newDivergingLines.add(i);
          }
        }
      }

      return new DiffSelection(
        this.defaultSelectionType,
        newDivergingLines.size === 0 ? null : newDivergingLines,
        this.selectableLines
      );
    } else {
      const newDivergingLines = new Set<number>();
      for (let i = from; i < to; i++) {
        if (this.isSelectable(i)) {
          newDivergingLines.add(i);
        }
      }

      return new DiffSelection(computedSelectionType, newDivergingLines, this.selectableLines);
    }
  }

  /**
   * Returns a copy of this selection instance where the selection state
   * of the specified line has been toggled (inverted).
   *
   * @param lineIndex The index (line number) of the line which should
   *                  be selected or unselected.
   */
  public withToggleLineSelection(lineIndex: number): DiffSelection {
    return this.withLineSelection(lineIndex, !this.isSelected(lineIndex));
  }

  /**
   * Returns a copy of this selection instance with all lines selected.
   */
  public withSelectAll(): DiffSelection {
    return new DiffSelection(DiffSelectionType.All, null, this.selectableLines);
  }

  /**
   * Returns a copy of this selection instance with no lines selected.
   */
  public withSelectNone(): DiffSelection {
    return new DiffSelection(DiffSelectionType.None, null, this.selectableLines);
  }

  /**
   * Returns a copy of this selection instance with a specified set of
   * selectable lines. By default a DiffSelection instance allows selecting
   * all lines (in fact, it has no notion of how many lines exist or what
   * it is that is being selected).
   *
   * If the selection instance lacks a set of selectable lines it can not
   * supply an accurate value from getSelectionType when the selection of
   * all lines has diverged from the default state (since it doesn't know
   * what all lines mean).
   */
  public withSelectableLines(selectableLines: Set<number>) {
    const divergingLines = this.divergingLines
      ? new Set([...this.divergingLines].filter((x) => selectableLines.has(x)))
      : null;

    return new DiffSelection(this.defaultSelectionType, divergingLines, selectableLines);
  }
}
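A brief sketch of how the immutable selection API above composes; the line numbers are arbitrary and chosen only for illustration.

// Sketch: deselect a range of lines without mutating the original selection.
import { DiffSelection, DiffSelectionType } from './diff-selection';

const initial = DiffSelection.fromInitialSelection(DiffSelectionType.All);

// Deselect lines 3..5 (the range end is exclusive, like substring).
const partial = initial.withRangeSelection(3, 3, false);

console.log(partial.getSelectionType()); // "Partial"
console.log(partial.isSelected(4));      // false
console.log(initial.isSelected(4));      // true - the original instance is unchanged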
3
packages/noodl-git/src/core/models/diff.ts
Normal file
@@ -0,0 +1,3 @@
export * from './diff-selection';
export * from './diff-data';
export * from './diff-line';
38
packages/noodl-git/src/core/models/merge.ts
Normal file
@@ -0,0 +1,38 @@
import { ComputedAction } from './computed-action';

interface IBlobResult {
  readonly mode: string;
  readonly sha: string;
  readonly path: string;
}

export interface IMergeTreeEntry {
  readonly context: string;
  readonly base?: IBlobResult;
  readonly result?: IBlobResult;
  readonly our?: IBlobResult;
  readonly their?: IBlobResult;
  readonly diff: string;
  readonly hasConflicts?: boolean;
}

export type MergeTreeSuccess = {
  readonly kind: ComputedAction.Clean;
  readonly entries: ReadonlyArray<IMergeTreeEntry>;
};

export type MergeTreeError = {
  readonly kind: ComputedAction.Conflicts;
  readonly conflictedFiles: number;
  readonly conflictedEntries: ReadonlyArray<IMergeTreeEntry>;
};

export type MergeTreeUnsupported = {
  readonly kind: ComputedAction.Invalid;
};

export type MergeTreeLoading = {
  readonly kind: ComputedAction.Loading;
};

export type MergeTreeResult = MergeTreeSuccess | MergeTreeError | MergeTreeUnsupported | MergeTreeLoading;
119
packages/noodl-git/src/core/models/progress.ts
Normal file
@@ -0,0 +1,119 @@
/**
 * Base interface containing all the properties that progress events
 * need to support.
 */
interface IProgress {
  /**
   * The overall progress of the operation, represented as a fraction between
   * 0 and 1.
   */
  readonly value: number;

  /**
   * An informative text for user consumption indicating the current operation
   * state. This will be high level such as 'Pushing origin' or
   * 'Fetching upstream' and will typically persist over a number of progress
   * events. For more detailed information about the progress see
   * the description field
   */
  readonly title?: string;

  /**
   * An informative text for user consumption. In the case of git progress this
   * will usually be the last raw line of output from git.
   */
  readonly description?: string;
}

/**
 * An object describing progression of an operation that can't be
 * directly mapped or attributed to either one of the more specific
 * progress events (Fetch, Checkout etc). An example of this would be
 * our own refreshing of internal repository state that takes part
 * after fetch, push and pull.
 */
export interface IGenericProgress extends IProgress {
  kind: 'generic';
}

/**
 * An object describing the progression of a branch checkout operation
 */
export interface ICheckoutProgress extends IProgress {
  kind: 'checkout';

  /** The branch that's currently being checked out */
  readonly targetBranch: string;
}

/**
 * An object describing the progression of a fetch operation
 */
export interface IFetchProgress extends IProgress {
  kind: 'fetch';

  /**
   * The remote that's being fetched
   */
  readonly remote: string;
}

/**
 * An object describing the progression of a pull operation
 */
export interface IPullProgress extends IProgress {
  kind: 'pull';

  /**
   * The remote that's being pulled from
   */
  readonly remote: string;
}

/**
 * An object describing the progression of a push operation
 */
export interface IPushProgress extends IProgress {
  kind: 'push';

  /**
   * The remote that's being pushed to
   */
  readonly remote: string;

  /**
   * The branch that's being pushed
   */
  readonly branch: string;
}

/**
 * An object describing the progression of a clone operation
 */
export interface ICloneProgress extends IProgress {
  kind: 'clone';
}

/** An object describing the progression of a revert operation. */
export interface IRevertProgress extends IProgress {
  kind: 'revert';
}

export interface IMultiCommitOperationProgress extends IProgress {
  readonly kind: 'multiCommitOperation';
  /** The summary of the commit applied */
  readonly currentCommitSummary: string;
  /** The number to signify which commit in a selection is being applied */
  readonly position: number;
  /** The total number of commits in the operation */
  readonly totalCommitCount: number;
}

export type Progress =
  | IGenericProgress
  | ICheckoutProgress
  | IFetchProgress
  | IPullProgress
  | IPushProgress
  | IRevertProgress
  | IMultiCommitOperationProgress;
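A small sketch of narrowing the Progress union above on its `kind` discriminator; the formatting of the messages is an arbitrary choice for illustration.

// Sketch: TypeScript narrows the union per `kind`, so kind-specific fields are available in each case.
import { Progress } from './progress';

function describeProgress(progress: Progress): string {
  switch (progress.kind) {
    case 'push':
      return `Pushing ${progress.branch} to ${progress.remote} (${Math.round(progress.value * 100)}%)`;
    case 'fetch':
    case 'pull':
      return `${progress.kind} from ${progress.remote}`;
    default:
      return progress.title ?? progress.kind;
  }
}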
27
packages/noodl-git/src/core/models/remote.ts
Normal file
@@ -0,0 +1,27 @@
/** A remote as defined in Git. */
export interface IRemote {
  readonly name: string;
  readonly url: string;
}

/** A remote as defined in Git. */
export interface IRemote {
  readonly name: string;
  readonly url: string;
}

/**
 * Gets a value indicating whether two remotes can be considered
 * structurally equivalent to each other.
 */
export function remoteEquals(x: IRemote | null, y: IRemote | null) {
  if (x === y) {
    return true;
  }

  if (x === null || y === null) {
    return false;
  }

  return x.name === y.name && x.url === y.url;
}
141
packages/noodl-git/src/core/models/snapshot.ts
Normal file
@@ -0,0 +1,141 @@
import { getCommitFiles } from "../diff-tree";
import { getBlobContents } from "../show";
import { getStashedFiles } from "../stash-files";
import { CommitIdentity } from "./commit-identity";
import { FileChange } from "./status";
import { CommittedFileChange } from "./status";

/** A snapshot entry can either be a Commit or a Stash */
export interface SnapshotEntry {
  /** The snapshot SHA. */
  readonly sha: string;
  /**
   * Information about the author of this commit.
   * Includes name, email and date.
   */
  readonly author: CommitIdentity;

  getFileAsString(name: string): Promise<string>;
  getFiles(): Promise<readonly FileChange[]>;
}

/**
 * A minimal shape of data to represent a commit, for situations where the
 * application does not require the full commit metadata.
 *
 * Equivalent to the output where Git commands support the
 * `--oneline --no-abbrev-commit` arguments to format a commit.
 */
export type CommitOneLine = {
  /** The full commit id associated with the commit */
  readonly sha: string;
  /** The first line of the commit message */
  readonly summary: string;
};

/** A git commit. */
export class Commit implements SnapshotEntry {
  /**
   * A value indicating whether the author and the committer
   * are the same person.
   */
  public readonly authoredByCommitter: boolean;

  /**
   * Whether or not the commit is a merge commit (i.e. has at least 2 parents)
   */
  public readonly isMergeCommit: boolean;

  /**
   * @param sha The commit's SHA.
   * @param shortSha The commit's shortSHA.
   * @param summary The first line of the commit message.
   * @param body The commit message without the first line and CR.
   * @param author Information about the author of this commit.
   *               Includes name, email and date.
   * @param committer Information about the committer of this commit.
   *                  Includes name, email and date.
   * @param parentSHAs The SHAs for the parents of the commit.
   * @param tags Tags associated with this commit.
   */
  public constructor(
    public readonly repositoryDir: string,
    public readonly sha: string,
    public readonly shortSha: string,
    public readonly summary: string,
    public readonly body: string,
    public readonly author: CommitIdentity,
    public readonly committer: CommitIdentity,
    public readonly parentSHAs: ReadonlyArray<string>,
    public readonly tags: ReadonlyArray<string>
  ) {
    this.authoredByCommitter =
      this.author &&
      this.author.name === this.committer.name &&
      this.author.email === this.committer.email;

    this.isMergeCommit = parentSHAs.length > 1;
  }

  public async getFileAsString(name: string): Promise<string> {
    return await getBlobContents(this.repositoryDir, this.sha, name);
  }

  public getFiles(): Promise<readonly FileChange[]> {
    return getCommitFiles(this.repositoryDir, this.sha);
  }
}

export class Stash implements SnapshotEntry {
  /**
   *
   * @param repositoryDir
   * @param name The fully qualified name of the entry i.e., `refs/stash@{0}`
   * @param sha The SHA of the commit object created as a result of stashing.
   * @param branchName The name of the branch at the time the entry was created.
   * @param message
   * @param author
   * @param files The list of files this stash touches
   */
  public constructor(
    public readonly repositoryDir: string,
    public readonly name: string,
    public readonly branchName: string,
    public readonly sha: string,
    public readonly message: string,
    public readonly author: CommitIdentity,
    public readonly files: StashedFileChanges
  ) {}

  public async getFileAsString(name: string): Promise<string> {
    return await getBlobContents(this.repositoryDir, this.sha, name);
  }

  public getFiles(): Promise<readonly FileChange[]> {
    return getStashedFiles(this.repositoryDir, this.sha);
  }
}

/** Whether file changes for a stash entry are loaded or not */
export enum StashedChangesLoadStates {
  NotLoaded = "NotLoaded",
  Loading = "Loading",
  Loaded = "Loaded",
}

/**
 * The status of stashed file changes
 *
 * When the status is `Loaded` all the files associated
 * with the stash are made available.
 */
export type StashedFileChanges =
  | {
      readonly kind:
        | StashedChangesLoadStates.NotLoaded
        | StashedChangesLoadStates.Loading;
    }
  | {
      readonly kind: StashedChangesLoadStates.Loaded;
      readonly files: ReadonlyArray<CommittedFileChange>;
    };
361
packages/noodl-git/src/core/models/status.ts
Normal file
361
packages/noodl-git/src/core/models/status.ts
Normal file
@@ -0,0 +1,361 @@
|
||||
import { DiffSelection, DiffSelectionType } from './diff';
|
||||
|
||||
/**
|
||||
* The status entry code as reported by Git.
|
||||
*/
|
||||
export enum GitStatusEntry {
|
||||
Modified = 'M',
|
||||
Added = 'A',
|
||||
Deleted = 'D',
|
||||
Renamed = 'R',
|
||||
Copied = 'C',
|
||||
Unchanged = '.',
|
||||
Untracked = '?',
|
||||
Ignored = '!',
|
||||
UpdatedButUnmerged = 'U'
|
||||
}
|
||||
|
||||
/** The enum representation of a Git file change */
|
||||
export enum FileStatusKind {
|
||||
New = 'New',
|
||||
Modified = 'Modified',
|
||||
Deleted = 'Deleted',
|
||||
Copied = 'Copied',
|
||||
Renamed = 'Renamed',
|
||||
Conflicted = 'Conflicted',
|
||||
Untracked = 'Untracked'
|
||||
}
|
||||
|
||||
/** Normal changes to a repository detected */
|
||||
export type PlainFileStatus = {
|
||||
kind: FileStatusKind.New | FileStatusKind.Modified | FileStatusKind.Deleted;
|
||||
};
|
||||
|
||||
/**
|
||||
* Copied or renamed files are change staged in the index that have a source
|
||||
* as well as a destination.
|
||||
*
|
||||
* The `oldPath` of a copied file also exists in the working directory, but the
|
||||
* `oldPath` of a renamed file will be missing from the working directory.
|
||||
*/
|
||||
export type CopiedOrRenamedFileStatus = {
|
||||
kind: FileStatusKind.Copied | FileStatusKind.Renamed;
|
||||
oldPath: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Details about a file marked as conflicted in the index which may have
|
||||
* conflict markers to inspect.
|
||||
*/
|
||||
export type ConflictsWithMarkers = {
|
||||
kind: FileStatusKind.Conflicted;
|
||||
entry: TextConflictEntry;
|
||||
conflictMarkerCount: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Details about a file marked as conflicted in the index which needs to be
|
||||
* resolved manually by the user.
|
||||
*/
|
||||
export type ManualConflict = {
|
||||
kind: FileStatusKind.Conflicted;
|
||||
entry: ManualConflictEntry;
|
||||
};
|
||||
|
||||
/** Union of potential conflict scenarios the application should handle */
|
||||
export type ConflictedFileStatus = ConflictsWithMarkers | ManualConflict;
|
||||
|
||||
/** Custom typeguard to differentiate Conflict files from other types */
|
||||
export function isConflictedFileStatus(appFileStatus: AppFileStatus): appFileStatus is ConflictedFileStatus {
|
||||
return appFileStatus.kind === FileStatusKind.Conflicted;
|
||||
}
|
||||
|
||||
/** Custom typeguard to differentiate ConflictsWithMarkers from other Conflict types */
|
||||
export function isConflictWithMarkers(
|
||||
conflictedFileStatus: ConflictedFileStatus
|
||||
): conflictedFileStatus is ConflictsWithMarkers {
|
||||
return conflictedFileStatus.hasOwnProperty('conflictMarkerCount');
|
||||
}
|
||||
|
||||
/** Custom typeguard to differentiate ManualConflict from other Conflict types */
|
||||
export function isManualConflict(conflictedFileStatus: ConflictedFileStatus): conflictedFileStatus is ManualConflict {
|
||||
return !conflictedFileStatus.hasOwnProperty('conflictMarkerCount');
|
||||
}
|
||||
|
||||
/** Denotes an untracked file in the working directory) */
|
||||
export type UntrackedFileStatus = { kind: FileStatusKind.Untracked };
|
||||
|
||||
/** The union of potential states associated with a file change in Desktop */
|
||||
export type AppFileStatus = PlainFileStatus | CopiedOrRenamedFileStatus | ConflictedFileStatus | UntrackedFileStatus;
|
||||
|
||||
/** The porcelain status for an ordinary changed entry */
|
||||
type OrdinaryEntry = {
|
||||
readonly kind: 'ordinary';
|
||||
/** how we should represent the file in the application */
|
||||
readonly type: 'added' | 'modified' | 'deleted';
|
||||
/** the status of the index for this entry (if known) */
|
||||
readonly index?: GitStatusEntry;
|
||||
/** the status of the working tree for this entry (if known) */
|
||||
readonly workingTree?: GitStatusEntry;
|
||||
};
|
||||
|
||||
/** The porcelain status for a renamed or copied entry */
|
||||
type RenamedOrCopiedEntry = {
|
||||
readonly kind: 'renamed' | 'copied';
|
||||
/** the status of the index for this entry (if known) */
|
||||
readonly index?: GitStatusEntry;
|
||||
/** the status of the working tree for this entry (if known) */
|
||||
readonly workingTree?: GitStatusEntry;
|
||||
};
|
||||
|
||||
export enum UnmergedEntrySummary {
|
||||
AddedByUs = 'added-by-us',
|
||||
DeletedByUs = 'deleted-by-us',
|
||||
AddedByThem = 'added-by-them',
|
||||
DeletedByThem = 'deleted-by-them',
|
||||
BothDeleted = 'both-deleted',
|
||||
BothAdded = 'both-added',
|
||||
BothModified = 'both-modified'
|
||||
}
|
||||
|
||||
/**
|
||||
* Valid Git index states that the application should detect text conflict
|
||||
* markers
|
||||
*/
|
||||
type TextConflictDetails =
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.BothAdded;
|
||||
readonly us: GitStatusEntry.Added;
|
||||
readonly them: GitStatusEntry.Added;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.BothModified;
|
||||
readonly us: GitStatusEntry.UpdatedButUnmerged;
|
||||
readonly them: GitStatusEntry.UpdatedButUnmerged;
|
||||
};
|
||||
|
||||
type TextConflictEntry = {
|
||||
readonly kind: 'conflicted';
|
||||
} & TextConflictDetails;
|
||||
|
||||
/**
|
||||
* Valid Git index states where the user needs to choose one of `us` or `them`
|
||||
* in the app.
|
||||
*/
|
||||
type ManualConflictDetails =
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.BothAdded;
|
||||
readonly us: GitStatusEntry.Added;
|
||||
readonly them: GitStatusEntry.Added;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.BothModified;
|
||||
readonly us: GitStatusEntry.UpdatedButUnmerged;
|
||||
readonly them: GitStatusEntry.UpdatedButUnmerged;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.AddedByUs;
|
||||
readonly us: GitStatusEntry.Added;
|
||||
readonly them: GitStatusEntry.UpdatedButUnmerged;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.DeletedByThem;
|
||||
readonly us: GitStatusEntry.UpdatedButUnmerged;
|
||||
readonly them: GitStatusEntry.Deleted;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.AddedByThem;
|
||||
readonly us: GitStatusEntry.UpdatedButUnmerged;
|
||||
readonly them: GitStatusEntry.Added;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.DeletedByUs;
|
||||
readonly us: GitStatusEntry.Deleted;
|
||||
readonly them: GitStatusEntry.UpdatedButUnmerged;
|
||||
}
|
||||
| {
|
||||
readonly action: UnmergedEntrySummary.BothDeleted;
|
||||
readonly us: GitStatusEntry.Deleted;
|
||||
readonly them: GitStatusEntry.Deleted;
|
||||
};
|
||||
|
||||
type ManualConflictEntry = {
|
||||
readonly kind: 'conflicted';
|
||||
} & ManualConflictDetails;
|
||||
|
||||
/** The porcelain status for an unmerged entry */
|
||||
export type UnmergedEntry = TextConflictEntry | ManualConflictEntry;
|
||||
|
||||
/** The porcelain status for an unmerged entry */
|
||||
type UntrackedEntry = {
|
||||
readonly kind: 'untracked';
|
||||
};
|
||||
|
||||
/** The union of possible entries from the git status */
|
||||
export type FileEntry = OrdinaryEntry | RenamedOrCopiedEntry | UnmergedEntry | UntrackedEntry;
|
||||
|
||||
/** encapsulate changes to a file associated with a commit */
|
||||
export class FileChange {
|
||||
/** An ID for the file change. */
|
||||
public readonly id: string;
|
||||
|
||||
public get extension(): string | undefined {
|
||||
const split = this.path.split('.');
|
||||
if (split.length > 0) {
|
||||
return '.' + split.at(-1);
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param path The relative path to the file in the repository.
|
||||
* @param status The status of the change to the file.
|
||||
*/
|
||||
public constructor(public readonly path: string, public readonly status: AppFileStatus) {
|
||||
if (status.kind === FileStatusKind.Renamed || status.kind === FileStatusKind.Copied) {
|
||||
this.id = `${status.kind}+${path}+${status.oldPath}`;
|
||||
} else {
|
||||
this.id = `${status.kind}+${path}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const imageFileExtensions = new Set(['.png', '.jpg', '.jpeg', '.gif', '.ico', '.webp', '.bmp', '.avif']);
|
||||
|
||||
export function isFileImage(file: FileChange): boolean {
|
||||
return Array.from(imageFileExtensions).some((x) => file.extension === x);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map a given file extension to the related data URL media type
|
||||
*/
|
||||
export function getMediaType(extension: string) {
|
||||
if (extension === '.png') {
|
||||
return 'image/png';
|
||||
}
|
||||
if (extension === '.jpg' || extension === '.jpeg') {
|
||||
return 'image/jpg';
|
||||
}
|
||||
if (extension === '.gif') {
|
||||
return 'image/gif';
|
||||
}
|
||||
if (extension === '.ico') {
|
||||
return 'image/x-icon';
|
||||
}
|
||||
if (extension === '.webp') {
|
||||
return 'image/webp';
|
||||
}
|
||||
if (extension === '.bmp') {
|
||||
return 'image/bmp';
|
||||
}
|
||||
if (extension === '.avif') {
|
||||
return 'image/avif';
|
||||
}
|
||||
|
||||
// fallback value as per the spec
|
||||
return 'text/plain';
|
||||
}
|
||||
|
||||
/** encapsulate the changes to a file in the working directory */
|
||||
export class WorkingDirectoryFileChange extends FileChange {
  /**
   * @param path The relative path to the file in the repository.
   * @param status The status of the change to the file.
   * @param selection Contains the selection details for this file - all, nothing or partial.
   * @param oldPath The original path in the case of a renamed file.
   */
  public constructor(path: string, status: AppFileStatus, public readonly selection: DiffSelection) {
    super(path, status);
  }

  /** Create a new WorkingDirectoryFileChange with the given includedness. */
  public withIncludeAll(include: boolean): WorkingDirectoryFileChange {
    const newSelection = include ? this.selection.withSelectAll() : this.selection.withSelectNone();

    return this.withSelection(newSelection);
  }

  /** Create a new WorkingDirectoryFileChange with the given diff selection. */
  public withSelection(selection: DiffSelection): WorkingDirectoryFileChange {
    return new WorkingDirectoryFileChange(this.path, this.status, selection);
  }
}

/**
 * An object encapsulating the changes to a committed file.
 *
 * @param commitish A commit SHA or some other identifier that ultimately
 *                  dereferences to a commit. This is the pointer to the
 *                  'after' version of this change. I.e. the parent of this
 *                  commit will contain the 'before' (or nothing, if the
 *                  file change represents a new file).
 */
export class CommittedFileChange extends FileChange {
  public constructor(path: string, status: AppFileStatus, public readonly commitish: string) {
    super(path, status);

    this.commitish = commitish;
  }
}

/** The state of the working directory for a repository */
export class WorkingDirectoryStatus {
  /** Create a new status with the given files. */
  public static fromFiles(files: ReadonlyArray<WorkingDirectoryFileChange>): WorkingDirectoryStatus {
    return new WorkingDirectoryStatus(files, getIncludeAllState(files));
  }

  private readonly fileIxById = new Map<string, number>();

  /**
   * @param files The list of changes in the repository's working directory.
   * @param includeAll Update the include checkbox state of the form.
   *                   NOTE: we need to track this separately to the file list selection
   *                   and perform two-way binding manually when this changes.
   */
  private constructor(
    public readonly files: ReadonlyArray<WorkingDirectoryFileChange>,
    public readonly includeAll: boolean | null = true
  ) {
    files.forEach((f, ix) => this.fileIxById.set(f.id, ix));
  }

  /**
   * Update the include state of all files in the working directory.
   */
  public withIncludeAllFiles(includeAll: boolean): WorkingDirectoryStatus {
    const newFiles = this.files.map((f) => f.withIncludeAll(includeAll));
    return new WorkingDirectoryStatus(newFiles, includeAll);
  }

  /** Find the file with the given ID. */
  public findFileWithID(id: string): WorkingDirectoryFileChange | null {
    const ix = this.fileIxById.get(id);
    return ix !== undefined ? this.files[ix] || null : null;
  }

  /** Find the index of the file with the given ID. Returns -1 if not found. */
  public findFileIndexByID(id: string): number {
    const ix = this.fileIxById.get(id);
    return ix !== undefined ? ix : -1;
  }
}

function getIncludeAllState(files: ReadonlyArray<WorkingDirectoryFileChange>): boolean | null {
  if (!files.length) {
    return true;
  }

  const allSelected = files.every((f) => f.selection.getSelectionType() === DiffSelectionType.All);
  const noneSelected = files.every((f) => f.selection.getSelectionType() === DiffSelectionType.None);

  let includeAll: boolean | null = null;
  if (allSelected) {
    includeAll = true;
  } else if (noneSelected) {
    includeAll = false;
  }

  return includeAll;
}
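The include-all flag is derived from the per-file diff selections, so consumers only construct a status through fromFiles. A minimal usage sketch, assuming a files array already exists (for example from a status parser) and that the file id is hypothetical:

declare const files: ReadonlyArray<WorkingDirectoryFileChange>;
declare const someFileId: string; // hypothetical id of one of the files

const status = WorkingDirectoryStatus.fromFiles(files);

// includeAll is true, false or null (partial) depending on the per-file selections.
console.log(status.includeAll);

// Tick the "include all" checkbox: every file gets a full selection.
const allIncluded = status.withIncludeAllFiles(true);

// Look a file up by its stable id without scanning the array.
const file = allIncluded.findFileWithID(someFileId);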
73
packages/noodl-git/src/core/open.ts
Normal file
73
packages/noodl-git/src/core/open.ts
Normal file
@@ -0,0 +1,73 @@
import * as Path from 'path';
import { RepositoryDoesNotExistErrorCode } from 'dugite';

import { git } from './client';

export async function open(basePath: string): Promise<string> {
  // const path = await NodeGit.Repository.discover(this._baseDir, 0, '')
  // this.repository = await NodeGit.Repository.open(path)
  // await this._setupRepository();
  //
  // // Check for conflicts
  // try {
  //   let index = await this.repository.index();
  //   if (index.hasConflicts()) {
  //     //just guess what's "ours" and "theirs", we don't really know what state the repo is in
  //     const filePaths = await this._solveConflicts(index, INDEX_STAGE_OURS, INDEX_STAGE_THEIRS);
  //     await Promise.all(filePaths.map(path => index.addByPath(path)));
  //     await index.write();
  //     await index.writeTree();
  //   }
  // } catch (e) {
  //   console.log("VCS error when opening project: " + e);
  // }

  return basePath;
}

/**
 * Get the absolute path to the top level working directory.
 *
 * @param path The path to a presumptive Git repository, either the root
 *             of the repository or any path within that repository.
 *
 * @returns null if the path provided doesn't reside within a Git repository.
 */
export async function getTopLevelWorkingDirectory(path: string): Promise<string | null> {
  let result;

  try {
    // Note, we use --show-cdup here instead of --show-toplevel because show-toplevel
    // dereferences symlinks and we want to resolve a path as closely as possible to
    // what the user gave us.
    result = await git(['rev-parse', '--show-cdup'], path, 'getTopLevelWorkingDirectory', {
      successExitCodes: new Set([0, 128])
    });
  } catch (err) {
    if (err.code === RepositoryDoesNotExistErrorCode) {
      return null;
    }

    throw err;
  }

  // Exit code 128 means it was run in a directory that's not a git
  // repository.
  if (result.exitCode === 128) {
    return null;
  }

  const relativePath = result.output.toString().trim();

  // No output means we're already at the root
  if (!relativePath) {
    return path;
  }

  return Path.resolve(path, relativePath);
}

/** Is the path a git repository? */
export async function isGitRepository(path: string): Promise<boolean> {
  return (await getTopLevelWorkingDirectory(path)) !== null;
}
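A small usage sketch for the two exported helpers; the project path is hypothetical and the calls run inside an async function:

const projectDir = '/path/to/project'; // hypothetical path

if (await isGitRepository(projectDir)) {
  // --show-cdup keeps symlinks unresolved as far as possible.
  const root = await getTopLevelWorkingDirectory(projectDir);
  console.log(`repository root: ${root}`);
}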
309
packages/noodl-git/src/core/patch-formatter.ts
Normal file
309
packages/noodl-git/src/core/patch-formatter.ts
Normal file
@@ -0,0 +1,309 @@
import { WorkingDirectoryFileChange, FileStatusKind } from './models/status';
import { DiffLineType, ITextDiff, DiffSelection, ILargeTextDiff } from './models/diff';

/**
 * Generates a string matching the format of a GNU unified diff header excluding
 * the (optional) timestamp fields.
 *
 * Note that this multi-line string includes a trailing newline.
 *
 * @param from The relative path to the original version of the file or
 *             null if the file is newly created.
 *
 * @param to   The relative path to the new version of the file or
 *             null if the file is newly created.
 */
function formatPatchHeader(from: string | null, to: string | null): string {
  // https://en.wikipedia.org/wiki/Diff_utility
  //
  // > At the beginning of the patch is the file information, including the full
  // > path and a time stamp delimited by a tab character.
  // >
  // > [...] the original file is preceded by "---" and the new file is preceded
  // > by "+++".
  //
  // We skip the time stamp to match git
  const fromPath = from ? `a/${from}` : '/dev/null';
  const toPath = to ? `b/${to}` : '/dev/null';

  return `--- ${fromPath}\n+++ ${toPath}\n`;
}

/**
 * Generates a string matching the format of a GNU unified diff header excluding
 * the (optional) timestamp fields with the appropriate from/to file names based
 * on the file state of the given WorkingDirectoryFileChange
 */
function formatPatchHeaderForFile(file: WorkingDirectoryFileChange) {
  switch (file.status.kind) {
    case FileStatusKind.New:
    case FileStatusKind.Untracked:
      return formatPatchHeader(null, file.path);

    // One might initially believe that renamed files should diff
    // against their old path. This is, after all, how git diff
    // does it right after a rename. But if we're creating a patch
    // to be applied along with a rename we must target the renamed
    // file.
    case FileStatusKind.Renamed:
    case FileStatusKind.Deleted:
    case FileStatusKind.Modified:
    case FileStatusKind.Copied:
    // We should not have the ability to format a file that's marked as
    // conflicted without more information about its current state.
    // I'd like to get to a point where `WorkingDirectoryFileChange` can be
    // differentiated between ordinary, renamed/copied and unmerged entries
    // and we can then verify the conflicted file is in a known good state but
    // that work needs to be done waaaaaaaay before we get to this point.
    case FileStatusKind.Conflicted:
      return formatPatchHeader(file.path, file.path);
    default:
      throw new Error(`Unknown file status ${file.status}`);
  }
}

/**
 * Generates a string matching the format of a GNU unified diff hunk header.
 * Note that this single line string includes a single trailing newline.
 *
 * @param oldStartLine The line in the old (or original) file where this diff
 *                     hunk starts.
 *
 * @param oldLineCount The number of lines in the old (or original) file that
 *                     this diff hunk covers.
 *
 * @param newStartLine The line in the new file where this diff hunk starts.
 *
 * @param newLineCount The number of lines in the new file that this diff hunk
 *                     covers.
 */
function formatHunkHeader(
  oldStartLine: number,
  oldLineCount: number,
  newStartLine: number,
  newLineCount: number,
  sectionHeading?: string | null
) {
  // > @@ -l,s +l,s @@ optional section heading
  // >
  // > The hunk range information contains two hunk ranges. The range for the hunk of the original
  // > file is preceded by a minus symbol, and the range for the new file is preceded by a plus
  // > symbol. Each hunk range is of the format l,s where l is the starting line number and s is
  // > the number of lines the change hunk applies to for each respective file.
  // >
  // > In many versions of GNU diff, each range can omit the comma and trailing value s,
  // > in which case s defaults to 1
  const lineInfoBefore = oldLineCount === 1 ? `${oldStartLine}` : `${oldStartLine},${oldLineCount}`;

  const lineInfoAfter = newLineCount === 1 ? `${newStartLine}` : `${newStartLine},${newLineCount}`;

  sectionHeading = sectionHeading ? ` ${sectionHeading}` : '';

  return `@@ -${lineInfoBefore} +${lineInfoAfter} @@${sectionHeading}\n`;
}

/**
 * Creates a GNU unified diff based on the original diff and a number
 * of selected or unselected lines (from file.selection). The patch is
 * formatted with the intention of being used for applying against an index
 * with git apply.
 *
 * Note that the file must have at least one selected addition or deletion,
 * ie it's not supported to use this method as a general purpose diff
 * formatter.
 *
 * @param file The file that the resulting patch will be applied to.
 *             This is used to determine the from and to paths for the
 *             patch header as well as retrieving the line selection state.
 *
 * @param diff The source diff
 */
export function formatPatch(file: WorkingDirectoryFileChange, diff: ITextDiff | ILargeTextDiff): string {
  let patch = '';

  diff.hunks.forEach((hunk, hunkIndex) => {
    let hunkBuf = '';

    let oldCount = 0;
    let newCount = 0;

    let anyAdditionsOrDeletions = false;

    hunk.lines.forEach((line, lineIndex) => {
      const absoluteIndex = hunk.unifiedDiffStart + lineIndex;

      // We write our own hunk headers
      if (line.type === DiffLineType.Hunk) {
        return;
      }

      // Context lines can always be let through, they will
      // never appear for new files.
      if (line.type === DiffLineType.Context) {
        hunkBuf += `${line.text}\n`;
        oldCount++;
        newCount++;
      } else if (file.selection.isSelected(absoluteIndex)) {
        // A line selected for inclusion.

        // Use the line as-is
        hunkBuf += `${line.text}\n`;

        if (line.type === DiffLineType.Add) {
          newCount++;
        }
        if (line.type === DiffLineType.Delete) {
          oldCount++;
        }

        anyAdditionsOrDeletions = true;
      } else {
        // Unselected lines in new files need to be ignored. A new file by
        // definition only consists of additions and therefore so will the
        // partial patch. If the user has elected not to commit a particular
        // addition we need to generate a patch that pretends that the line
        // never existed.
        if (file.status.kind === FileStatusKind.New || file.status.kind === FileStatusKind.Untracked) {
          return;
        }

        // An unselected added line has no impact on this patch, pretend
        // it was never added to the old file by dropping it.
        if (line.type === DiffLineType.Add) {
          return;
        }

        // An unselected deleted line has never happened as far as this patch
        // is concerned which means that we should treat it as if it's still
        // in the old file so we'll convert it to a context line.
        if (line.type === DiffLineType.Delete) {
          hunkBuf += ` ${line.text.substr(1)}\n`;
          oldCount++;
          newCount++;
        } else {
          // Guarantee that we've covered all the line types
          throw new Error(`Unsupported line type ${line.type}`);
        }
      }

      if (line.noTrailingNewLine) {
        hunkBuf += '\\ No newline at end of file\n';
      }
    });

    // Skip writing this hunk if all there is is context lines.
    if (!anyAdditionsOrDeletions) {
      return;
    }

    patch += formatHunkHeader(hunk.header.oldStartLine, oldCount, hunk.header.newStartLine, newCount);
    patch += hunkBuf;
  });

  // If we get into this state we should never have been called in the first
  // place. Someone gave us a faulty diff and/or faulty selection state.
  if (!patch.length) {
    console.debug(`formatPatch: empty patch for ${file.path}`);
    throw new Error(`Could not generate a patch, no changes`);
  }

  patch = formatPatchHeaderForFile(file) + patch;

  return patch;
}

/**
 * Creates a GNU unified diff to discard a set of changes (determined by the selection object)
 * based on the passed diff and a number of selected or unselected lines.
 *
 * The patch is formatted with the intention of being used for applying against an index
 * with git apply.
 *
 * Note that the diff must have at least one selected addition or deletion.
 *
 * @param filePath  The path of the file that the resulting patch will be applied to.
 *                  This is used to determine the from and to paths for the
 *                  patch header.
 * @param diff      All the local changes for that file.
 * @param selection A selection of lines from the diff object that we want to discard.
 */
export function formatPatchToDiscardChanges(
  filePath: string,
  diff: ITextDiff,
  selection: DiffSelection
): string | null {
  let patch = '';

  diff.hunks.forEach((hunk, hunkIndex) => {
    let hunkBuf = '';

    let oldCount = 0;
    let newCount = 0;

    let anyAdditionsOrDeletions = false;

    hunk.lines.forEach((line, lineIndex) => {
      const absoluteIndex = hunk.unifiedDiffStart + lineIndex;

      // We write our own hunk headers
      if (line.type === DiffLineType.Hunk) {
        return;
      }

      // Context lines can always be let through, they will
      // never appear for new files.
      if (line.type === DiffLineType.Context) {
        hunkBuf += `${line.text}\n`;
        oldCount++;
        newCount++;
      } else if (selection.isSelected(absoluteIndex)) {
        // Reverse the change (if it was an added line, treat it as removed and vice versa).
        if (line.type === DiffLineType.Add) {
          hunkBuf += `-${line.text.substr(1)}\n`;
          newCount++;
        } else if (line.type === DiffLineType.Delete) {
          hunkBuf += `+${line.text.substr(1)}\n`;
          oldCount++;
        } else {
          throw new Error(`Unsupported line type ${line.type}`);
        }

        anyAdditionsOrDeletions = true;
      } else {
        if (line.type === DiffLineType.Add) {
          // An unselected added line will stay in the file after discarding the changes,
          // so we just print it untouched on the diff.
          oldCount++;
          newCount++;
          hunkBuf += ` ${line.text.substr(1)}\n`;
        } else if (line.type === DiffLineType.Delete) {
          // An unselected removed line has no impact on this patch since it's not
          // found on the current working copy of the file, so we can ignore it.
          return;
        } else {
          // Guarantee that we've covered all the line types.
          throw new Error(`Unsupported line type ${line.type}`);
        }
      }

      if (line.noTrailingNewLine) {
        hunkBuf += '\\ No newline at end of file\n';
      }
    });

    // Skip writing this hunk if all there is is context lines.
    if (!anyAdditionsOrDeletions) {
      return;
    }

    patch += formatHunkHeader(hunk.header.newStartLine, newCount, hunk.header.oldStartLine, oldCount);
    patch += hunkBuf;
  });

  if (patch.length === 0) {
    // The selection resulted in an empty patch.
    return null;
  }

  return formatPatchHeader(filePath, filePath) + patch;
}
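To make the header rules above concrete, here is what the private helpers produce for a few arbitrary inputs (shown as expected return values, derived directly from the code above):

// formatPatchHeader('src/app.ts', 'src/app.ts') returns:
//   '--- a/src/app.ts\n+++ b/src/app.ts\n'
//
// formatPatchHeader(null, 'src/new.ts') (a newly created file) returns:
//   '--- /dev/null\n+++ b/src/new.ts\n'
//
// formatHunkHeader(10, 3, 10, 4, 'function foo()') returns:
//   '@@ -10,3 +10,4 @@ function foo()\n'
//
// formatHunkHeader(1, 1, 1, 1) omits the counts, per the GNU diff shorthand:
//   '@@ -1 +1 @@\n'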
13
packages/noodl-git/src/core/progress/checkout.ts
Normal file
13
packages/noodl-git/src/core/progress/checkout.ts
Normal file
@@ -0,0 +1,13 @@
import { GitProgressParser } from './common';

const steps = [{ title: 'Checking out files', weight: 1 }];

/**
 * A class that parses output from `git checkout --progress` and provides
 * structured progress events.
 */
export class CheckoutProgressParser extends GitProgressParser {
  public constructor() {
    super(steps);
  }
}
27
packages/noodl-git/src/core/progress/clone.ts
Normal file
27
packages/noodl-git/src/core/progress/clone.ts
Normal file
@@ -0,0 +1,27 @@
import { GitProgressParser } from './common';

/**
 * Highly approximate (some would say outright inaccurate) division
 * of the individual progress reporting steps in a clone operation
 */
const steps = [
  // NOTE: According to GitHub, Counting objects should take about the same
  // time as Compressing objects, but I am not getting the other codes in my tests.
  { title: 'remote: Counting objects', weight: 0.9 },
  { title: 'remote: Compressing objects', weight: 0.1 },
  { title: 'Receiving objects', weight: 0.6 },
  { title: 'Resolving deltas', weight: 0.1 },
  { title: 'Checking out files', weight: 0.2 }
];

/**
 * A utility class for interpreting the output from `git clone --progress`
 * and turning that into a percentage value estimating the overall progress
 * of the clone.
 */
export class CloneProgressParser extends GitProgressParser {
  public constructor() {
    super(steps);
  }
}
307
packages/noodl-git/src/core/progress/common.ts
Normal file
307
packages/noodl-git/src/core/progress/common.ts
Normal file
@@ -0,0 +1,307 @@
/**
 * Identifies a particular subset of progress events from Git by
 * title.
 */
export interface IProgressStep {
  /**
   * The title of the git progress event. By title we refer to the
   * exact value of the title field in the Git progress struct:
   *
   * https://github.com/git/git/blob/6a2c2f8d34fa1e8f3bb85d159d354810ed63692e/progress.c#L31-L39
   *
   * In essence this means anything up to (but not including) the last colon (:)
   * in a single progress line. Take this example progress line
   *
   *    remote: Compressing objects: 14% (159/1133)
   *
   * In this case the title would be 'remote: Compressing objects'.
   */
  readonly title: string;

  /**
   * The weight of this step in relation to others for a particular
   * Git operation. This value can be any number as long as it's
   * proportional to others in the same parser; they will all be scaled
   * to a decimal value between 0 and 1 before being used to calculate
   * overall progress.
   */
  readonly weight: number;
}

/**
 * The overall progress of one or more steps in a Git operation.
 */
export interface IGitProgress {
  readonly kind: 'progress';

  /**
   * The overall percent of the operation
   */
  readonly percent: number;

  /**
   * The underlying progress line that this progress instance was
   * constructed from. Note that the percent value in details
   * doesn't correspond to that of percent in this instance for
   * two reasons. First, we calculate percent by dividing value with total
   * to produce a high precision decimal value between 0 and 1 while
   * details.percent is a rounded integer between 0 and 100.
   *
   * Second, the percent in this instance is scaled in relation to any
   * other steps included in the progress parser.
   */
  readonly details: IGitProgressInfo;
}

export interface IGitOutput {
  readonly kind: 'context';
  readonly percent: number;
  readonly text: string;
}

/**
 * A well-structured representation of a Git progress line.
 */
export interface IGitProgressInfo {
  /**
   * The title of the git progress event. By title we refer to the
   * exact value of the title field in Git's progress struct:
   *
   * https://github.com/git/git/blob/6a2c2f8d34fa1e8f3bb85d159d354810ed63692e/progress.c#L31-L39
   *
   * In essence this means anything up to (but not including) the last colon (:)
   * in a single progress line. Take this example progress line
   *
   *    remote: Compressing objects: 14% (159/1133)
   *
   * In this case the title would be 'remote: Compressing objects'.
   */
  readonly title: string;

  /**
   * The progress value as parsed from the Git progress line.
   *
   * We define value to mean the same as it does in the Git progress struct, i.e
   * it's the number of processed units.
   *
   * In the progress line 'remote: Compressing objects: 14% (159/1133)' the
   * value is 159.
   *
   * In the progress line 'remote: Counting objects: 123' the value is 123.
   */
  readonly value: number;

  /**
   * The progress total as parsed from the git progress line.
   *
   * We define total to mean the same as it does in the Git progress struct, i.e
   * it's the total number of units in a given process.
   *
   * In the progress line 'remote: Compressing objects: 14% (159/1133)' the
   * total is 1133.
   *
   * In the progress line 'remote: Counting objects: 123' the total is undefined.
   */
  readonly total?: number;

  /**
   * The progress percent as parsed from the git progress line represented as
   * an integer between 0 and 100.
   *
   * We define percent to mean the same as it does in the Git progress struct, i.e
   * it's the value divided by total.
   *
   * In the progress line 'remote: Compressing objects: 14% (159/1133)' the
   * percent is 14.
   *
   * In the progress line 'remote: Counting objects: 123' the percent is undefined.
   */
  readonly percent?: number;

  /**
   * Whether or not the parsed git progress line indicates that the operation
   * is done.
   *
   * This is denoted by a trailing ", done" string in the progress line.
   * Example: Checking out files: 100% (728/728), done
   */
  readonly done: boolean;

  /**
   * The untouched raw text line that this instance was parsed from. Useful
   * for presenting the actual output from Git to the user.
   */
  readonly text: string;
}

/**
 * A utility class for interpreting progress output from `git`
 * and turning that into a percentage value estimating the overall progress
 * of an operation. An operation could be something like `git fetch`
 * which contains multiple steps, each individually reported by Git as
 * progress events between 0 and 100%.
 *
 * A parser cannot be reused; it's meant to parse a single stderr stream
 * from Git.
 */
export class GitProgressParser {
  private readonly steps: ReadonlyArray<IProgressStep>;

  /* The provided steps should always occur in order but some
   * might not happen at all (like remote compression of objects) so
   * we keep track of the "highest" seen step so that we can fill in
   * progress with the assumption that we've already seen the previous
   * steps.
   */
  private stepIndex = 0;

  private lastPercent = 0;

  /**
   * Initialize a new instance of a Git progress parser.
   *
   * @param steps - A series of steps that could be present in the git
   *                output with relative weight between these steps. Note
   *                that order is significant here as once the parser sees
   *                a progress line that matches a step all previous steps
   *                are considered completed and overall progress is adjusted
   *                accordingly.
   */
  public constructor(steps: ReadonlyArray<IProgressStep>) {
    if (!steps.length) {
      throw new Error('must specify at least one step');
    }

    // Scale the step weight so that they're all a percentage
    // adjusted to the total weight of all steps.
    const totalStepWeight = steps.reduce((sum, step) => sum + step.weight, 0);

    this.steps = steps.map((step) => ({
      title: step.title,
      weight: step.weight / totalStepWeight
    }));
  }

  /**
   * Parse the given line of output from Git, returns either an `IGitProgress`
   * instance if the line could successfully be parsed as a Git progress
   * event whose title was registered with this parser or an `IGitOutput`
   * instance if the line couldn't be parsed or if the title wasn't
   * registered with the parser.
   */
  public parse(line: string): IGitProgress | IGitOutput {
    const progress = parse(line);

    if (!progress) {
      return { kind: 'context', text: line, percent: this.lastPercent };
    }

    let percent = 0;

    for (let i = 0; i < this.steps.length; i++) {
      const step = this.steps[i];

      if (i >= this.stepIndex && progress.title === step.title) {
        if (progress.total) {
          percent += step.weight * (progress.value / progress.total);
        }

        this.stepIndex = i;
        this.lastPercent = percent;

        return { kind: 'progress', percent, details: progress };
      } else {
        percent += step.weight;
      }
    }

    return { kind: 'context', text: line, percent: this.lastPercent };
  }
}

const percentRe = /^(\d{1,3})% \((\d+)\/(\d+)\)$/;
const valueOnlyRe = /^\d+$/;

/**
 * Attempts to parse a single line of progress output from Git.
 *
 * For details about how Git formats progress see
 *
 *   https://github.com/git/git/blob/6a2c2f8d34fa1e8f3bb85d159d354810ed63692e/progress.c
 *
 * Some examples:
 *  remote: Counting objects: 123
 *  remote: Counting objects: 167587, done.
 *  Receiving objects:  99% (166741/167587), 272.10 MiB | 2.39 MiB/s
 *  Checking out files: 100% (728/728)
 *  Checking out files: 100% (728/728), done
 *
 * @returns An object containing well-structured information about the progress
 *          or null if the line could not be parsed as a Git progress line.
 */
export function parse(line: string): IGitProgressInfo | null {
  const titleLength = line.lastIndexOf(': ');

  if (titleLength === 0) {
    return null;
  }

  if (titleLength - 2 >= line.length) {
    return null;
  }

  const title = line.substring(0, titleLength);
  const progressText = line.substring(title.length + 2).trim();

  if (!progressText.length) {
    return null;
  }

  const progressParts = progressText.split(', ');

  if (!progressParts.length) {
    return null;
  }

  let value: number;
  let total: number | undefined = undefined;
  let percent: number | undefined = undefined;

  if (valueOnlyRe.test(progressParts[0])) {
    value = parseInt(progressParts[0], 10);

    if (isNaN(value)) {
      return null;
    }
  } else {
    const percentMatch = percentRe.exec(progressParts[0]);

    if (!percentMatch || percentMatch.length !== 4) {
      return null;
    }

    percent = parseInt(percentMatch[1], 10);
    value = parseInt(percentMatch[2], 10);
    total = parseInt(percentMatch[3], 10);

    if (isNaN(percent) || isNaN(value) || isNaN(total)) {
      return null;
    }
  }

  let done = false;

  // We don't parse throughput at the moment so let's just loop
  // through the remaining parts.
  for (let i = 1; i < progressParts.length; i++) {
    if (progressParts[i] === 'done.') {
      done = true;
      break;
    }
  }

  return { title, value, percent, total, done, text: line };
}
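A short usage sketch of the parser and the standalone parse function; the step weights here are arbitrary:

import { GitProgressParser, parse } from './common';

const parser = new GitProgressParser([
  { title: 'remote: Compressing objects', weight: 1 },
  { title: 'Receiving objects', weight: 9 }
]);

// Weights are normalized, so the second step covers 90% of the overall range.
const event = parser.parse('Receiving objects:  50% (500/1000)');
// event.kind === 'progress' and event.percent === 0.1 + 0.9 * 0.5 === 0.55

// Lines with an unknown title fall through as context output.
const other = parser.parse('warning: something unrelated');
// other.kind === 'context'

// The low-level parser can also be used directly.
const info = parse('remote: Counting objects: 167587, done.');
// info: { title: 'remote: Counting objects', value: 167587, done: true, ... }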
22
packages/noodl-git/src/core/progress/fetch.ts
Normal file
22
packages/noodl-git/src/core/progress/fetch.ts
Normal file
@@ -0,0 +1,22 @@
import { GitProgressParser } from './common';

/**
 * Highly approximate (some would say outright inaccurate) division
 * of the individual progress reporting steps in a fetch operation
 */
const steps = [
  { title: 'remote: Compressing objects', weight: 0.1 },
  { title: 'Receiving objects', weight: 0.7 },
  { title: 'Resolving deltas', weight: 0.2 }
];

/**
 * A utility class for interpreting the output from `git fetch --progress`
 * and turning that into a percentage value estimating the overall progress
 * of the fetch.
 */
export class FetchProgressParser extends GitProgressParser {
  public constructor() {
    super(steps);
  }
}
49
packages/noodl-git/src/core/progress/from-process.ts
Normal file
49
packages/noodl-git/src/core/progress/from-process.ts
Normal file
@@ -0,0 +1,49 @@
import { GitProgressParser, IGitProgress, IGitOutput } from './common';
import { GitExecutionOptions } from '../client';
import { merge } from '../helpers/object';

/**
 * Merges an instance of GitExecutionOptions with a data callback that parses
 * progress output using the provided parser.
 *
 * If the given options object already has a processCallback specified it will
 * be overwritten.
 */
export async function executionOptionsWithProgress(
  options: GitExecutionOptions,
  parser: GitProgressParser,
  progressCallback: (progress: IGitProgress | IGitOutput) => void
): Promise<GitExecutionOptions> {
  let env = {};

  return merge(options, {
    spawn: true,
    /**
     * Reads the output of the process chunk by chunk and parses each line
     * using the provided parser, forwarding the result to progressCallback.
     */
    processDataCallback: (chunk: Buffer) => {
      const lines = chunk.toString().split(/[\r\n]/g);
      lines.forEach((line) => {
        if (!line) return;
        const progress = parser.parse(line);
        progressCallback(progress);
      });
    },
    // NOTE: This is for when we want to use spawn: false later
    // processCallback: (process) => {
    //   console.log('pid', process.pid);
    //
    //   const callback = (line: Buffer) => {
    //     console.log(line.toString());
    //     const progress = parser.parse(line.toString());
    //     progressCallback(progress);
    //   }
    //
    //   byline(process.stdout).on("data", callback);
    //   byline(process.stderr).on("data", callback);
    // },
    env: merge(options.env, env)
  });
}
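This mirrors how the pull and push modules further down wire a parser into a git call. A reduced sketch, assuming the base options object and the 'origin' remote; this is not the library's own fetch implementation:

import { git } from '../client';
import { FetchProgressParser } from './fetch';
import { executionOptionsWithProgress } from './from-process';

async function fetchWithProgress(repositoryDir: string) {
  const opts = await executionOptionsWithProgress(
    {}, // base GitExecutionOptions, e.g. expectedErrors (assumed empty here)
    new FetchProgressParser(),
    (progress) => {
      if (progress.kind === 'progress') {
        console.log(`fetch ${Math.round(progress.percent * 100)}%`);
      }
    }
  );

  await git(['fetch', '--progress', 'origin'], repositoryDir, 'fetch', opts);
}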
7
packages/noodl-git/src/core/progress/index.ts
Normal file
7
packages/noodl-git/src/core/progress/index.ts
Normal file
@@ -0,0 +1,7 @@
export * from './checkout';
export * from './clone';
export * from './push';
export * from './fetch';
export * from './common';
export * from './pull';
export * from './from-process';
27
packages/noodl-git/src/core/progress/pull.ts
Normal file
27
packages/noodl-git/src/core/progress/pull.ts
Normal file
@@ -0,0 +1,27 @@
import { GitProgressParser } from './common';

/**
 * Highly approximate (some would say outright inaccurate) division
 * of the individual progress reporting steps in a pull operation.
 *
 * Note: A pull is essentially the same as a fetch except we might
 * have to check out some files at the end. We assume that these
 * delta updates are fairly quick though.
 */
const steps = [
  { title: 'remote: Compressing objects', weight: 0.1 },
  { title: 'Receiving objects', weight: 0.7 },
  { title: 'Resolving deltas', weight: 0.15 },
  { title: 'Checking out files', weight: 0.15 }
];

/**
 * A utility class for interpreting the output from `git pull --progress`
 * and turning that into a percentage value estimating the overall progress
 * of the pull.
 */
export class PullProgressParser extends GitProgressParser {
  public constructor() {
    super(steps);
  }
}
22
packages/noodl-git/src/core/progress/push.ts
Normal file
22
packages/noodl-git/src/core/progress/push.ts
Normal file
@@ -0,0 +1,22 @@
import { GitProgressParser } from './common';

/**
 * Highly approximate (some would say outright inaccurate) division
 * of the individual progress reporting steps in a push operation
 */
const steps = [
  { title: 'Compressing objects', weight: 0.2 },
  { title: 'Writing objects', weight: 0.7 },
  { title: 'remote: Resolving deltas', weight: 0.1 }
];

/**
 * A utility class for interpreting the output from `git push --progress`
 * and turning that into a percentage value estimating the overall progress
 * of the push.
 */
export class PushProgressParser extends GitProgressParser {
  public constructor() {
    super(steps);
  }
}
13
packages/noodl-git/src/core/progress/revert.ts
Normal file
13
packages/noodl-git/src/core/progress/revert.ts
Normal file
@@ -0,0 +1,13 @@
import { GitProgressParser, IProgressStep } from './common';

const steps: ReadonlyArray<IProgressStep> = [{ title: '', weight: 0 }];

/**
 * A class that parses output from `git revert` and provides structured progress
 * events.
 */
export class RevertProgressParser extends GitProgressParser {
  public constructor() {
    super(steps);
  }
}
114
packages/noodl-git/src/core/pull.ts
Normal file
114
packages/noodl-git/src/core/pull.ts
Normal file
@@ -0,0 +1,114 @@
import { AuthenticationErrors } from './authentication';
import { git, GitExecutionOptions, gitNetworkArguments } from './client';
import { GitError } from './git-error';
import { Branch } from './models/branch';
import { IPullProgress } from './models/progress';
import { IRemote } from './models/remote';
import { PullProgressParser, executionOptionsWithProgress } from './progress';

/**
 * Returns the arguments to use on any git operation that can end up
 * triggering a rebase.
 */
function gitRebaseArguments() {
  return [
    // Explicitly set the rebase backend to merge.
    // We need to force this option to be sure that Desktop
    // uses the merge backend even if the user has the apply backend
    // configured, since this is the only one supported.
    // This can go away once git deprecates the apply backend.
    '-c',
    'rebase.backend=merge'
  ];
}

async function getPullArgs(
  repositoryDir: string,
  remote: string,
  branch: string,
  progressCallback?: (progress: IPullProgress) => void
) {
  const networkArguments = await gitNetworkArguments(repositoryDir);

  const args = [...networkArguments, ...gitRebaseArguments(), 'pull'];

  if (progressCallback != null) {
    args.push('--progress');
  }

  args.push(remote);
  args.push(branch);
  // --rebase is used to reconcile divergent branches so git doesn't complain,
  // and to prevent merge commits when you have local changes.
  args.push('--rebase');

  return args;
}

/**
 * Pull from the specified remote.
 *
 * @param repositoryDir - The repository in which the pull should take place
 *
 * @param remote - The remote that should be pulled from
 *
 * @param branch - The branch to pull
 *
 * @param progressCallback - An optional function which will be invoked
 *                           with information about the current progress
 *                           of the pull operation. When provided this enables
 *                           the '--progress' command line flag for
 *                           'git pull'.
 */
export async function pull(
  repositoryDir: string,
  remote: IRemote,
  branch: Branch | string,
  progressCallback?: (progress: IPullProgress) => void
): Promise<void> {
  let opts: GitExecutionOptions = {
    expectedErrors: AuthenticationErrors
  };

  if (progressCallback) {
    const title = `Pulling ${remote.name}`;
    const kind = 'pull';

    opts = await executionOptionsWithProgress({ ...opts }, new PullProgressParser(), (progress) => {
      // In addition to progress output from the remote end and from
      // git itself, the stderr output from pull contains information
      // about ref updates. We don't need to bring those into the progress
      // stream so we'll just punt on anything we don't know about for now.
      if (progress.kind === 'context') {
        if (!progress.text.startsWith('remote: Counting objects')) {
          return;
        }
      }

      const description = progress.kind === 'progress' ? progress.details.text : progress.text;

      const value = progress.percent;

      progressCallback({
        kind,
        title,
        description,
        value,
        remote: remote.name
      });
    });

    // Initial progress
    progressCallback({ kind, title, value: 0, remote: remote.name });
  }

  const args = await getPullArgs(
    repositoryDir,
    remote.name,
    typeof branch === 'object' ? branch.name : branch,
    progressCallback
  );
  const result = await git(args, repositoryDir, 'pull', opts);

  if (result.gitErrorDescription) {
    throw new GitError(result, args);
  }
}
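A minimal call sketch inside an async function, assuming an 'origin' remote and a 'main' branch exist; the remote URL and repositoryDir are illustrative:

await pull(
  repositoryDir,
  { name: 'origin', url: 'https://example.com/repo.git' },
  'main',
  (progress) => console.log(progress.title, progress.value)
);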
148
packages/noodl-git/src/core/push.ts
Normal file
148
packages/noodl-git/src/core/push.ts
Normal file
@@ -0,0 +1,148 @@
import { GitError as DugiteError } from "dugite";

import { git, GitExecutionOptions, gitNetworkArguments } from "./client";
import { GitError } from "./git-error";
import { IPushProgress } from "./models/progress";
import { PushProgressParser, executionOptionsWithProgress } from "./progress";
import { AuthenticationErrors } from "./authentication";
import { IRemote } from "./models/remote";

export type PushOptions = {
  /**
   * Force-push the branch without losing changes in the remote that
   * haven't been fetched.
   *
   * See https://git-scm.com/docs/git-push#Documentation/git-push.txt---no-force-with-lease
   */
  readonly forceWithLease: boolean;
};

export async function pushDelete(
  repositoryDir: string,
  remoteName: string,
  branchName: string
) {
  const networkArguments = await gitNetworkArguments(repositoryDir);

  const args = [
    ...networkArguments,
    "push",
    remoteName,
    "--delete",
    branchName,
  ];

  const expectedErrors = new Set<DugiteError>(AuthenticationErrors);
  expectedErrors.add(DugiteError.ProtectedBranchForcePush);

  let opts: GitExecutionOptions = {
    expectedErrors,
  };

  const result = await git(args, repositoryDir, "push", opts);

  if (result.gitErrorDescription) {
    throw new GitError(result, args);
  }
}

/**
 * Push a local branch to the remote, optionally setting the upstream.
 *
 * @param repositoryDir - The repository from which to push
 *
 * @param remote - The remote to push the specified branch to
 *
 * @param localBranch - The local branch to push
 *
 * @param remoteBranch - The remote branch to push to
 *
 * @param tagsToPush - The tags to push along with the branch.
 *
 * @param options - Optional customizations for the push execution.
 *                  See PushOptions for more information.
 *
 * @param progressCallback - An optional function which will be invoked
 *                           with information about the current progress
 *                           of the push operation. When provided this enables
 *                           the '--progress' command line flag for
 *                           'git push'.
 */
export async function push(
  repositoryDir: string,
  remote: IRemote,
  localBranch: string,
  remoteBranch: string | null,
  tagsToPush: ReadonlyArray<string> | null,
  options: PushOptions = {
    forceWithLease: false,
  },
  progressCallback?: (progress: IPushProgress) => void
): Promise<boolean> {
  const networkArguments = await gitNetworkArguments(repositoryDir);

  const args = [
    ...networkArguments,
    "push",
    remote.name,
    remoteBranch ? `${localBranch}:${remoteBranch}` : localBranch,
  ];

  if (tagsToPush !== null) {
    args.push(...tagsToPush);
  }
  if (!remoteBranch) {
    args.push("--set-upstream");
  } else if (options.forceWithLease === true) {
    args.push("--force-with-lease");
  }

  const expectedErrors = new Set<DugiteError>(AuthenticationErrors);
  expectedErrors.add(DugiteError.ProtectedBranchForcePush);

  let opts: GitExecutionOptions = {
    expectedErrors,
  };

  if (progressCallback) {
    args.push("--progress");
    const title = `Pushing to ${remote.name}`;
    const kind = "push";

    opts = await executionOptionsWithProgress(
      { ...opts },
      new PushProgressParser(),
      (progress) => {
        const description =
          progress.kind === "progress" ? progress.details.text : progress.text;
        const value = progress.percent;

        progressCallback({
          kind,
          title,
          description,
          value,
          remote: remote.name,
          branch: localBranch,
        });
      }
    );

    // Initial progress
    progressCallback({
      kind: "push",
      title,
      value: 0,
      remote: remote.name,
      branch: localBranch,
    });
  }

  const result = await git(args, repositoryDir, "push", opts);

  if (result.gitErrorDescription) {
    throw new GitError(result, args);
  }

  return true;
}
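A usage sketch inside an async function; the remote URL and repositoryDir are illustrative, and the branch names are assumed:

// Push local 'main' to 'origin/main' with progress reporting.
await push(
  repositoryDir,
  { name: "origin", url: "https://example.com/repo.git" },
  "main",
  "main",
  null, // no tags
  { forceWithLease: false },
  (progress) => console.log(progress.title, progress.value)
);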
114
packages/noodl-git/src/core/rebase.ts
Normal file
114
packages/noodl-git/src/core/rebase.ts
Normal file
@@ -0,0 +1,114 @@
import Path from 'path';
import fs from 'fs';

/**
 * Rebase internal state used to track how and where the rebase is applied to
 * the repository.
 */
export type RebaseInternalState = {
  /** The branch containing commits that should be rebased */
  readonly targetBranch: string;
  /**
   * The commit ID of the base branch, to be used as a starting point for
   * the rebase.
   */
  readonly baseBranchTip: string;
  /**
   * The commit ID of the target branch at the start of the rebase, which points
   * to the original commit history.
   */
  readonly originalBranchTip: string;
};

/** The app-specific results from attempting to rebase a repository */
export enum RebaseResult {
  /**
   * Git completed the rebase without reporting any errors, and the caller can
   * signal success to the user.
   */
  CompletedWithoutError = 'CompletedWithoutError',
  /**
   * The rebase encountered conflicts while attempting to rebase, and these
   * need to be resolved by the user before the rebase can continue.
   */
  ConflictsEncountered = 'ConflictsEncountered',
  /**
   * The rebase was not able to continue as tracked files were not staged in
   * the index.
   */
  OutstandingFilesNotStaged = 'OutstandingFilesNotStaged',
  /**
   * The rebase was not attempted because it could not check the status of the
   * repository. The caller needs to confirm the repository is in a usable
   * state.
   */
  Aborted = 'Aborted',
  /**
   * An unexpected error as part of the rebase flow was caught and handled.
   *
   * Check the logs to find the relevant Git details.
   */
  Error = 'Error'
}

/**
 * Check that the `.git/REBASE_HEAD` file exists in a repository to confirm
 * a rebase operation is underway.
 */
async function isRebaseHeadSet(repositoryDir: string): Promise<boolean> {
  const path = Path.join(repositoryDir, '.git', 'REBASE_HEAD');

  // NOTE: access calls reject, which causes issues with devtools.
  // return fs.promises.access(path).then(constant(true), constant(false));

  return new Promise<boolean>((resolve) => {
    resolve(fs.existsSync(path));
  });
}

/**
 * Get the internal state about the rebase being performed on a repository. This
 * information is required to help Desktop display information to the user
 * about the current action as well as the options available.
 *
 * Returns `null` if no rebase is detected, or if the expected information
 * cannot be found in the repository.
 */
export async function getRebaseInternalState(repositoryDir: string): Promise<RebaseInternalState | null> {
  const isRebase = await isRebaseHeadSet(repositoryDir);

  if (!isRebase) {
    return null;
  }

  let originalBranchTip: string | null = null;
  let targetBranch: string | null = null;
  let baseBranchTip: string | null = null;

  try {
    originalBranchTip = await fs.promises.readFile(
      Path.join(repositoryDir, '.git', 'rebase-merge', 'orig-head'),
      'utf8'
    );

    originalBranchTip = originalBranchTip.trim();

    targetBranch = await fs.promises.readFile(Path.join(repositoryDir, '.git', 'rebase-merge', 'head-name'), 'utf8');

    if (targetBranch.startsWith('refs/heads/')) {
      targetBranch = targetBranch.substring(11).trim();
    }

    baseBranchTip = await fs.promises.readFile(Path.join(repositoryDir, '.git', 'rebase-merge', 'onto'), 'utf8');

    baseBranchTip = baseBranchTip.trim();
  } catch {}

  if (originalBranchTip != null && targetBranch != null && baseBranchTip != null) {
    return { originalBranchTip, targetBranch, baseBranchTip };
  }

  // Unable to resolve the rebase state of this repository.
  return null;
}
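A small sketch of how the state is typically consumed, inside an async function and assuming repositoryDir points at a repository:

const rebaseState = await getRebaseInternalState(repositoryDir);

if (rebaseState === null) {
  console.log('no rebase in progress');
} else {
  const { targetBranch, baseBranchTip, originalBranchTip } = rebaseState;
  console.log(`rebasing ${targetBranch} (${originalBranchTip}) onto ${baseBranchTip}`);
}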
7
packages/noodl-git/src/core/refs.ts
Normal file
7
packages/noodl-git/src/core/refs.ts
Normal file
@@ -0,0 +1,7 @@
import { git } from './client';

export async function refhead(basePath: string) {
  const { output } = await git(['rev-parse', 'HEAD'], basePath, 'refhead');

  return output.toString().replace(/[\r\n]/g, '');
}
64
packages/noodl-git/src/core/remotes.ts
Normal file
64
packages/noodl-git/src/core/remotes.ts
Normal file
@@ -0,0 +1,64 @@
import { GitError } from 'dugite';
import { git } from './client';
import { IRemote } from './models/remote';

/**
 * List the remotes, sorted alphabetically by `name`, for a repository.
 */
export async function getRemotes(basePath: string): Promise<ReadonlyArray<IRemote>> {
  const result = await git(['remote', '-v'], basePath, 'getRemotes', {
    expectedErrors: new Set([GitError.NotAGitRepository])
  });

  if (result.gitError === GitError.NotAGitRepository) {
    return [];
  }

  const output = result.output.toString();
  const lines = output.split('\n');
  const remotes = lines
    .filter((x) => x.endsWith('(fetch)'))
    .map((x) => x.split(/\s+/))
    .map((x) => ({ name: x[0], url: x[1] }));

  return remotes;
}

/** Add a new remote with the given URL. */
export async function addRemote(basePath: string, name: string, url: string): Promise<IRemote> {
  await git(['remote', 'add', name, url], basePath, 'addRemote');

  return { url, name };
}

/** Removes an existing remote, or silently errors if it doesn't exist */
export async function removeRemote(basePath: string, name: string): Promise<void> {
  const options = {
    successExitCodes: new Set([0, 2, 128])
  };

  await git(['remote', 'remove', name], basePath, 'removeRemote', options);
}

/** Changes the URL for the remote that matches the given name */
export async function setRemoteURL(basePath: string, name: string, url: string): Promise<true> {
  await git(['remote', 'set-url', name, url], basePath, 'setRemoteURL');
  return true;
}

/**
 * Get the URL for the remote that matches the given name.
 *
 * Returns null if the remote could not be found
 */
export async function getRemoteURL(basePath: string, name: string): Promise<string | null> {
  const result = await git(['remote', 'get-url', name], basePath, 'getRemoteURL', {
    successExitCodes: new Set([0, 2, 128])
  });

  if (result.exitCode !== 0) {
    return null;
  }

  return result.output.toString();
}
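A usage sketch inside an async function; the remote name and URL are illustrative:

const remotes = await getRemotes(repositoryDir);

if (!remotes.some((r) => r.name === 'origin')) {
  await addRemote(repositoryDir, 'origin', 'https://example.com/repo.git');
}

const url = await getRemoteURL(repositoryDir, 'origin'); // null if the remote is missing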
48
packages/noodl-git/src/core/reset.ts
Normal file
48
packages/noodl-git/src/core/reset.ts
Normal file
@@ -0,0 +1,48 @@
import { git } from "./client";

/** The reset modes which are supported. */
export const enum GitResetMode {
  /**
   * Resets the index and working tree. Any changes to tracked files in the
   * working tree since <commit> are discarded.
   */
  Hard = 0,
  /**
   * Does not touch the index file or the working tree at all (but resets the
   * head to <commit>, just like all modes do). This leaves all your changed
   * files "Changes to be committed", as git status would put it.
   */
  Soft,

  /**
   * Resets the index but not the working tree (i.e., the changed files are
   * preserved but not marked for commit) and reports what has not been updated.
   * This is the default action for git reset.
   */
  Mixed,
}

function resetModeToArgs(mode: GitResetMode, ref: string): string[] {
  switch (mode) {
    case GitResetMode.Hard:
      return ["reset", "--hard", ref];
    case GitResetMode.Mixed:
      return ["reset", ref];
    case GitResetMode.Soft:
      return ["reset", "--soft", ref];
    default:
      throw new Error(`Unknown reset mode: ${mode}`);
  }
}

/** Reset to the given ref using the given mode. */
export async function reset(
  repositoryDir: string,
  mode: GitResetMode,
  ref: string,
  additionalArgs: string[] = []
): Promise<true> {
  const args = resetModeToArgs(mode, ref).concat(additionalArgs);
  await git(args, repositoryDir, "reset");
  return true;
}
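A minimal sketch of the two most common calls, inside an async function and assuming repositoryDir points at a repository:

// Discard all local changes and reset the index and working tree to HEAD.
await reset(repositoryDir, GitResetMode.Hard, "HEAD");

// Unstage everything while keeping the working tree intact.
await reset(repositoryDir, GitResetMode.Mixed, "HEAD");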
183
packages/noodl-git/src/core/rev-list.ts
Normal file
183
packages/noodl-git/src/core/rev-list.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { GitError } from "dugite";
|
||||
import { git } from "./client";
|
||||
import { Branch, BranchType, IAheadBehind } from "./models/branch";
|
||||
import { CommitOneLine } from "./models/snapshot";
|
||||
|
||||
/**
|
||||
* Convert two refs into the Git range syntax representing the set of commits
|
||||
* that are reachable from `to` but excluding those that are reachable from
|
||||
* `from`. This will not be inclusive to the `from` ref, see
|
||||
* `revRangeInclusive`.
|
||||
*
|
||||
* Each parameter can be the commit SHA or a ref name, or specify an empty
|
||||
* string to represent HEAD.
|
||||
*
|
||||
* @param from The start of the range
|
||||
* @param to The end of the range
|
||||
*/
|
||||
export function revRange(from: string, to: string) {
|
||||
return `${from}..${to}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert two refs into the Git range syntax representing the set of commits
|
||||
* that are reachable from `to` but excluding those that are reachable from
|
||||
* `from`. However as opposed to `revRange`, this will also include `from` ref.
|
||||
*
|
||||
* Each parameter can be the commit SHA or a ref name, or specify an empty
|
||||
* string to represent HEAD.
|
||||
*
|
||||
* @param from The start of the range
|
||||
* @param to The end of the range
|
||||
*/
|
||||
export function revRangeInclusive(from: string, to: string) {
|
||||
return `${from}^..${to}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert two refs into the Git symmetric difference syntax, which represents
|
||||
* the set of commits that are reachable from either `from` or `to` but not
|
||||
* from both.
|
||||
*
|
||||
* Each parameter can be the commit SHA or a ref name, or you can use an empty
|
||||
* string to represent HEAD.
|
||||
*
|
||||
* @param from The start of the range
|
||||
* @param to The end of the range
|
||||
*/
|
||||
export function revSymmetricDifference(from: string, to: string) {
|
||||
return `${from}...${to}`;
|
||||
}
|
||||
|
||||
/** Calculate the number of commits the range is ahead and behind. */
|
||||
export async function getAheadBehind(
|
||||
repositoryDir: string,
|
||||
range: string
|
||||
): Promise<IAheadBehind | null> {
|
||||
// `--left-right` annotates the list of commits in the range with which side
|
||||
// they're coming from. When used with `--count`, it tells us how many
|
||||
// commits we have from the two different sides of the range.
|
||||
const args = ["rev-list", "--left-right", "--count", range, "--"];
|
||||
const result = await git(args, repositoryDir, "getAheadBehind", {
|
||||
expectedErrors: new Set([GitError.BadRevision]),
|
||||
});
|
||||
|
||||
// This means one of the refs (most likely the upstream branch) no longer
|
||||
// exists. In that case we can't be ahead/behind at all.
|
||||
if (result.gitError === GitError.BadRevision) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const stdout = result.output.toString();
|
||||
const pieces = stdout.split("\t");
|
||||
if (pieces.length !== 2) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const ahead = parseInt(pieces[0], 10);
|
||||
if (isNaN(ahead)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const behind = parseInt(pieces[1], 10);
|
||||
if (isNaN(behind)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { ahead, behind };
|
||||
}
|
||||
|
||||
/** Calculate the number of commits `branch` is ahead/behind its upstream. */
|
||||
export async function getBranchAheadBehind(
|
||||
repositoryDir: string,
|
||||
branch: Branch
|
||||
): Promise<IAheadBehind | null> {
|
||||
if (branch.type === BranchType.Remote) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const upstream = branch.upstream;
|
||||
if (!upstream) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// NB: The three dot form means we'll go all the way back to the merge base
|
||||
// of the branch and its upstream. Practically this is important for seeing
|
||||
// "through" merges.
|
||||
const range = revSymmetricDifference(branch.name, upstream);
|
||||
return getAheadBehind(repositoryDir, range);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of commits from the target branch that do not exist on the base
|
||||
* branch, ordered how they will be applied to the base branch.
|
||||
* Therefore, this will not include the baseBranchSha commit.
|
||||
*
|
||||
* This emulates how `git rebase` initially determines what will be applied to
|
||||
* the repository.
|
||||
*
|
||||
* Returns `null` when the rebase is not possible to perform, because of a
|
||||
* missing commit ID
|
||||
*/
|
||||
export async function getCommitsBetweenCommits(
|
||||
repositoryDir: string,
|
||||
baseBranchSha: string,
|
||||
targetBranchSha: string
|
||||
): Promise<ReadonlyArray<CommitOneLine> | null> {
|
||||
const range = revRange(baseBranchSha, targetBranchSha);
|
||||
|
||||
return getCommitsInRange(repositoryDir, range);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of commits inside the provided range.
|
||||
*
|
||||
* Returns `null` when it is not possible to perform because of a bad range.
|
||||
*/
|
||||
export async function getCommitsInRange(
|
||||
repositoryDir: string,
|
||||
range: string
|
||||
): Promise<ReadonlyArray<CommitOneLine> | null> {
|
||||
const args = [
|
||||
"rev-list",
|
||||
range,
|
||||
"--reverse",
|
||||
// the combination of these two arguments means each line of the stdout
|
||||
// will contain the full commit sha and a commit summary
|
||||
`--oneline`,
|
||||
`--no-abbrev-commit`,
|
||||
"--",
|
||||
];
|
||||
|
||||
const options = {
|
||||
expectedErrors: new Set<GitError>([GitError.BadRevision]),
|
||||
};
|
||||
|
||||
const result = await git(args, repositoryDir, "getCommitsInRange", options);
|
||||
|
||||
if (result.gitError === GitError.BadRevision) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const lines = result.output.toString().split("\n");
|
||||
|
||||
const commits = new Array<CommitOneLine>();
|
||||
|
||||
const commitSummaryRe = /^([a-z0-9]{40}) (.*)$/;
|
||||
|
||||
for (const line of lines) {
|
||||
const match = commitSummaryRe.exec(line);
|
||||
|
||||
if (match !== null && match.length === 3) {
|
||||
const sha = match[1];
|
||||
const summary = match[2];
|
||||
|
||||
commits.push({
|
||||
sha,
|
||||
summary,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return commits;
|
||||
}
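// Illustrative usage sketch (the repository path and range are placeholders): list the
// commits on a feature branch that are not yet on main, oldest first, in the order a
// rebase would apply them.
//
//   const commits = await getCommitsInRange("/path/to/repo", "main..feature");
//   // -> [{ sha: "<40-char sha>", summary: "<commit summary>" }, ...] or null on a bad range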
|
||||
52
packages/noodl-git/src/core/show-ref.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { git } from './client';
|
||||
|
||||
export async function getRefHeads(repositoryDir: string): Promise<Map<string, string>> {
|
||||
const args = ['show-ref', '--heads', '-d'];
|
||||
|
||||
const tags = await git(args, repositoryDir, 'getRefHeads', {
|
||||
successExitCodes: new Set([0, 1]) // when there are no heads, git exits with 1.
|
||||
});
|
||||
|
||||
const tagsArray: Array<[string, string]> = tags.output
|
||||
.toString()
|
||||
.split('\n')
|
||||
.filter((line) => line !== '')
|
||||
.map((line) => {
|
||||
const [commitSha, rawTagName] = line.split(' ');
|
||||
|
||||
// Normalize ref names by removing the leading refs/heads/ prefix and any trailing ^{}.
|
||||
//
|
||||
// git show-ref returns two entries for annotated tags:
|
||||
// deadbeef refs/tags/annotated-tag
|
||||
// de510b99 refs/tags/annotated-tag^{}
|
||||
//
|
||||
// The first entry's sha corresponds to the tag object holding the annotation, while the
// second entry corresponds to the actual commit the tag points at.
|
||||
// By normalizing the tag name we can make sure that the commit sha gets stored in the returned
|
||||
// Map of commits (since git will always print the entry with the commit sha at the end).
|
||||
const headName = rawTagName.replace(/^refs\/heads\//, '').replace(/\^\{\}$/, '');
|
||||
|
||||
return [headName, commitSha];
|
||||
});
|
||||
|
||||
return new Map(tagsArray);
|
||||
}
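// Illustrative example of the `git show-ref --heads -d` output this function parses
// (shas and branch names are made up):
//
//   1df0c1fb4a6867e9c2d3a1b0e5f6a7b8c9d0e1f2 refs/heads/main
//   9ab40c2d3e4f5a6b7c8d9e0f1a2b3c4d5e6f7a8b refs/heads/feature/login
//
// which yields Map { "main" => "1df0c1fb...", "feature/login" => "9ab40c2d..." }.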
|
||||
|
||||
export async function getAllTags(repositoryDir: string): Promise<Map<string, string>> {
|
||||
const args = ['show-ref', '-d'];
|
||||
|
||||
const tags = await git(args, repositoryDir, 'getAllTags', {
|
||||
successExitCodes: new Set([0, 1]) // when there are no tags, git exits with 1.
|
||||
});
|
||||
|
||||
const tagsArray: Array<[string, string]> = tags.output
|
||||
.toString()
|
||||
.split('\n')
|
||||
.filter((line) => line !== '')
|
||||
.map((line) => {
|
||||
const [commitSha, rawTagName] = line.split(' ');
|
||||
return [rawTagName, commitSha];
|
||||
});
|
||||
|
||||
return new Map(tagsArray);
|
||||
}
|
||||
48
packages/noodl-git/src/core/show.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { ChildProcess } from 'child_process';
|
||||
import { git, GitExecutionOptions } from './client';
|
||||
|
||||
/**
|
||||
* Retrieve the contents of a file from the repository at a given
|
||||
* reference, commit, or tree.
|
||||
*
|
||||
 * Returns a promise that resolves to the contents of the file, or rejects
 * with an error if the file doesn't exist in the given revision.
|
||||
*
|
||||
* @param repositoryDir - The repository directory from where to read the file
|
||||
*
|
||||
* @param commitish - A commit SHA or some other identifier that
|
||||
* ultimately dereferences to a commit/tree.
|
||||
*
|
||||
* @param path - The file path, relative to the repository
|
||||
* root from where to read the blob contents
|
||||
*/
|
||||
export async function getBlobContents(repositoryDir: string, commitish: string, path: string): Promise<string> {
|
||||
const args = ['show', `${commitish}:${path}`];
|
||||
const opts: GitExecutionOptions = {
|
||||
parseErrors: false
|
||||
};
|
||||
|
||||
const { output } = await git(args, repositoryDir, 'getBlobContents', opts);
|
||||
return output;
|
||||
}
|
||||
|
||||
export async function getBlobBinaryContents(repositoryDir: string, commitish: string, path: string): Promise<Buffer> {
|
||||
const setBinaryEncoding: (process: ChildProcess) => void = (cb) => {
|
||||
// If Node.js encounters a synchronous runtime error while spawning
|
||||
// `stdout` will be undefined and the error will be emitted asynchronously
|
||||
if (cb.stdout) {
|
||||
cb.stdout.setEncoding('binary');
|
||||
}
|
||||
};
|
||||
|
||||
const args = ['show', `${commitish}:${path}`];
|
||||
const opts: GitExecutionOptions = {
|
||||
parseErrors: false,
|
||||
processCallback: setBinaryEncoding
|
||||
};
|
||||
|
||||
const { output } = await git(args, repositoryDir, 'getBlobBinaryContents', opts);
|
||||
|
||||
return Buffer.from(output, 'binary');
|
||||
}
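// Illustrative usage sketch (the repository path, ref and file paths are placeholders):
//
//   const pkg = await getBlobContents("/path/to/repo", "HEAD", "package.json");
//   const icon = await getBlobBinaryContents("/path/to/repo", "HEAD", "assets/icon.png");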
|
||||
59
packages/noodl-git/src/core/stash-files.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { git } from "./client";
|
||||
import { parseChangedFiles } from "./logs";
|
||||
import { CommittedFileChange } from "./models/status";
|
||||
|
||||
/**
|
||||
* Get the files that were changed in the given stash commit.
|
||||
*
|
||||
* This is different than `getChangedFiles` because stashes
|
||||
* have _3 parents(!!!)_
|
||||
*/
|
||||
export async function getStashedFiles(
|
||||
repositoryDir: string,
|
||||
stashSha: string
|
||||
): Promise<ReadonlyArray<CommittedFileChange>> {
|
||||
const [trackedFiles, untrackedFiles] = await Promise.all([
|
||||
getChangedFilesWithinStash(repositoryDir, stashSha),
|
||||
getChangedFilesWithinStash(repositoryDir, `${stashSha}^3`),
|
||||
]);
|
||||
|
||||
const files = new Map<string, CommittedFileChange>();
|
||||
trackedFiles.forEach((x) => files.set(x.path, x));
|
||||
untrackedFiles.forEach((x) => files.set(x.path, x));
|
||||
return [...files.values()].sort((x, y) => x.path.localeCompare(y.path));
|
||||
}
|
||||
|
||||
/**
|
||||
* Same thing as `getChangedFiles` but with extra handling for 128 exit code
|
||||
* (which happens if the commit's parent is not valid)
|
||||
*/
|
||||
async function getChangedFilesWithinStash(repositoryDir: string, sha: string) {
|
||||
// opt-in for rename detection (-M) and copies detection (-C)
|
||||
// this is equivalent to the user configuring 'diff.renames' to 'copies'
|
||||
// NOTE: order here matters - doing -M before -C means copies aren't detected
|
||||
const args = [
|
||||
"log",
|
||||
sha,
|
||||
"-C",
|
||||
"-M",
|
||||
"-m",
|
||||
"-1",
|
||||
"--no-show-signature",
|
||||
"--first-parent",
|
||||
"--name-status",
|
||||
"--format=format:",
|
||||
"-z",
|
||||
"--",
|
||||
];
|
||||
const result = await git(args, repositoryDir, "getChangedFilesForStash", {
|
||||
// if this fails, its most likely
|
||||
// because there weren't any untracked files,
|
||||
// and that's okay!
|
||||
successExitCodes: new Set([0, 128]),
|
||||
});
|
||||
|
||||
if (result.exitCode === 0 && result.output.toString().length > 0) {
|
||||
return parseChangedFiles(result.output.toString(), sha);
|
||||
}
|
||||
return [];
|
||||
}
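// Background for the `^3` suffix used above: a stash commit created with `git stash -u`
// has three parents — ^1 is the commit HEAD pointed to when stashing, ^2 holds the staged
// (index) changes, and ^3 holds the untracked files. That is why getStashedFiles reads the
// untracked files from `${stashSha}^3`, and why a 128 exit code (missing third parent) is
// tolerated when the stash was created without `-u`.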
|
||||
242
packages/noodl-git/src/core/stash.ts
Normal file
@@ -0,0 +1,242 @@
|
||||
import { GitError as DugiteError } from "dugite";
|
||||
import { git } from "./client";
|
||||
import { GitError } from "./git-error";
|
||||
import {
|
||||
Stash,
|
||||
StashedChangesLoadStates,
|
||||
StashedFileChanges,
|
||||
} from "./models/snapshot";
|
||||
import { Branch } from "./models/branch";
|
||||
import { CommitIdentity } from "./models/commit-identity";
|
||||
import { GitActionError, GitActionErrorCode } from "../actions";
|
||||
|
||||
/**
|
||||
* RegEx for determining if a stash entry is created by Desktop
|
||||
*
|
||||
* This is done by looking for a magic string with the following
|
||||
* formats:
|
||||
* `On branchname: some message`
|
||||
* `WIP on branchname: some message` (git default when message is omitted)
|
||||
*/
|
||||
const desktopStashEntryMessageRe = /on (.+):/i;
|
||||
|
||||
type StashResult = {
|
||||
/** The stash entries created by Desktop */
|
||||
readonly entries: ReadonlyArray<Stash>;
|
||||
|
||||
/**
|
||||
* The total amount of stash entries,
|
||||
* i.e. stash entries created both by Desktop and outside of Desktop
|
||||
*/
|
||||
readonly stashEntryCount: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the list of stash entries created by Desktop in the current repository
|
||||
* using the default ordering of refs (which is LIFO ordering),
|
||||
* as well as the total amount of stash entries.
|
||||
*/
|
||||
export async function getStashes(repositoryDir: string): Promise<StashResult> {
|
||||
const delimiter = "1F";
|
||||
const delimiterString = String.fromCharCode(parseInt(delimiter, 16));
|
||||
const format = ["%gD", "%H", "%gs", "%an <%ae> %at +0000"].join(
|
||||
`%x${delimiter}`
|
||||
);
|
||||
|
||||
const result = await git(
|
||||
["log", "-g", "-z", `--pretty=${format}`, "refs/stash"],
|
||||
repositoryDir,
|
||||
"getStashEntries",
|
||||
{
|
||||
successExitCodes: new Set([0, 128]),
|
||||
}
|
||||
);
|
||||
|
||||
// There's no refs/stash reflog in the repository or it's not
|
||||
// even a repository. In either case we don't care
|
||||
if (result.exitCode === 128) {
|
||||
return { entries: [], stashEntryCount: 0 };
|
||||
}
|
||||
|
||||
const desktopStashEntries: Array<Stash> = [];
|
||||
const files: StashedFileChanges = {
|
||||
kind: StashedChangesLoadStates.NotLoaded,
|
||||
};
|
||||
|
||||
const entries = result.output
|
||||
.toString()
|
||||
.split("\0")
|
||||
.filter((s) => s !== "");
|
||||
for (const entry of entries) {
|
||||
const pieces = entry.split(delimiterString);
|
||||
if (pieces.length === 4) {
|
||||
const [name, stashSha, message, identity] = pieces;
|
||||
const branchName = extractBranchFromMessage(message);
|
||||
|
||||
// Example: 'On main: !!Noodl<main>'
|
||||
const marker = message.split(":")[1].trim();
|
||||
|
||||
if (branchName !== null) {
|
||||
desktopStashEntries.push(
|
||||
new Stash(
|
||||
repositoryDir,
|
||||
name,
|
||||
branchName,
|
||||
stashSha,
|
||||
marker,
|
||||
CommitIdentity.parseIdentity(identity),
|
||||
files
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
entries: desktopStashEntries,
|
||||
stashEntryCount: entries.length - 1,
|
||||
};
|
||||
}
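// Illustrative example of one reflog record produced by the `--pretty` format above
// (sha, author and timestamp are made up). With `-z`, records are NUL-separated and each
// record splits on the 0x1F delimiter into [name, stashSha, message, identity]:
//
//   "stash@{0}\x1f4a6867e9...\x1fOn main: !!Noodl<main>\x1fJane Doe <jane@example.com> 1700000000 +0000"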
|
||||
|
||||
/**
|
||||
* Returns the last Desktop created stash entry for the given branch
|
||||
*/
|
||||
export async function getLastStashEntryForBranch(
|
||||
repositoryDir: string,
|
||||
branch: Branch | string
|
||||
) {
|
||||
const stash = await getStashes(repositoryDir);
|
||||
const branchName = typeof branch === "string" ? branch : branch.name;
|
||||
|
||||
// Since stash objects are returned in a LIFO manner, the first
|
||||
// entry found is guaranteed to be the last entry created
|
||||
return stash.entries.find((stash) => stash.branchName === branchName) || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stash the working directory changes for the current branch
|
||||
*/
|
||||
export async function createStashEntry(
|
||||
repositoryDir: string,
|
||||
message: string
|
||||
): Promise<Stash> {
|
||||
const args = ["stash", "push", "-u", "-m", message];
|
||||
const result = await git(args, repositoryDir, "createStashEntry", {
|
||||
successExitCodes: new Set<number>([0, 1]),
|
||||
});
|
||||
|
||||
if (result.exitCode === 1) {
|
||||
// search for any line starting with `error:` - /m here to ensure this is
|
||||
// applied to each line, without needing to split the text
|
||||
const errorPrefixRe = /^error: /m;
|
||||
|
||||
const matches = errorPrefixRe.exec(result.error.toString());
|
||||
if (matches !== null && matches.length > 0) {
|
||||
// rethrow, because these messages should prevent the stash from being created
|
||||
throw new GitError(result, args);
|
||||
}
|
||||
|
||||
// if no error messages were emitted by Git, we should log but continue because
|
||||
// a valid stash was created and this should not interfere with the checkout
|
||||
|
||||
console.info(
|
||||
`[createStashEntry] a stash was created successfully but exit code ${
|
||||
result.exitCode
|
||||
} reported. stderr: ${result.error.toString()}`
|
||||
);
|
||||
}
|
||||
|
||||
const response = result.output.toString();
|
||||
|
||||
// Stash doesn't consider it an error that there aren't any local changes to save.
|
||||
if (response === "No local changes to save\n") {
|
||||
throw new GitActionError(GitActionErrorCode.StashNoLocalChanges);
|
||||
}
|
||||
|
||||
// Fetch all the stashes and return the one we just created.
|
||||
const stashes = await getStashes(repositoryDir);
|
||||
return stashes.entries.find((x) => x.message === message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the given stash entry if it exists
|
||||
*
|
||||
* @param marker
|
||||
*/
|
||||
export async function dropStashEntry(repositoryDir: string, marker: string) {
|
||||
if (marker !== null) {
|
||||
const args = ["stash", "drop", marker];
|
||||
await git(args, repositoryDir, "dropStashEntry");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pops the stash entry identified by matching `stashSha` to its commit hash.
|
||||
*
|
||||
* To see the commit hash of stash entry, run
|
||||
* `git log -g refs/stash --pretty="%nentry: %gd%nsubject: %gs%nhash: %H%n"`
|
||||
* in a repo with some stash entries.
|
||||
*/
|
||||
export async function popStashEntry(
|
||||
repositoryDir: string,
|
||||
marker: string
|
||||
): Promise<void> {
|
||||
// ignoring these git errors for now, this will change when we start
|
||||
// implementing the stash conflict flow
|
||||
const expectedErrors = new Set<DugiteError>([DugiteError.MergeConflicts]);
|
||||
const successExitCodes = new Set<number>([0, 1]);
|
||||
|
||||
if (marker === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const args = ["stash", "pop", "--quiet", `${marker}`];
|
||||
const result = await git(args, repositoryDir, "popStashEntry", {
|
||||
expectedErrors,
|
||||
successExitCodes,
|
||||
spawn: false,
|
||||
});
|
||||
|
||||
// Popping a stash that creates conflicts in the working directory reports an
// exit code of `1`, and the stash entry is not dropped after being applied,
// so we check for this case and drop it manually.
|
||||
if (result.exitCode === 1) {
|
||||
if (result.error.toString().length > 0) {
|
||||
// rethrow, because anything in stderr should prevent the stash from being popped
|
||||
throw new GitError(result, args);
|
||||
}
|
||||
|
||||
console.info(
|
||||
`[popStashEntry] a stash was popped successfully but exit code ${result.exitCode} reported.`
|
||||
);
|
||||
// bye bye
|
||||
await dropStashEntry(repositoryDir, marker);
|
||||
}
|
||||
}
|
||||
|
||||
export async function popStashEntryToBranch(
|
||||
repositoryDir: string,
|
||||
marker: string,
|
||||
branchName: string
|
||||
) {
|
||||
// ignoring these git errors for now, this will change when we start
|
||||
// implementing the stash conflict flow
|
||||
const expectedErrors = new Set<DugiteError>([DugiteError.MergeConflicts]);
|
||||
const successExitCodes = new Set<number>([0, 1]);
|
||||
|
||||
const args = ["stash", "branch", branchName, `${marker}`];
|
||||
const result = await git(args, repositoryDir, "popStashEntryToBranch", {
|
||||
expectedErrors,
|
||||
successExitCodes,
|
||||
spawn: false,
|
||||
});
|
||||
|
||||
if (result.exitCode === 1) {
|
||||
throw new GitError(result, args);
|
||||
}
|
||||
}
|
||||
|
||||
function extractBranchFromMessage(message: string): string | null {
|
||||
const match = desktopStashEntryMessageRe.exec(message);
|
||||
return match === null || match[1].length === 0 ? null : match[1];
|
||||
}
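// Illustrative examples for extractBranchFromMessage (inputs are made up):
//
//   extractBranchFromMessage("On feature/login: !!Noodl<feature/login>") // -> "feature/login"
//   extractBranchFromMessage("WIP on main: 1234567 last commit summary") // -> "main"
//   extractBranchFromMessage("some unrelated reflog subject")            // -> null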
|
||||
358
packages/noodl-git/src/core/status-parser.ts
Normal file
@@ -0,0 +1,358 @@
|
||||
import Deque from 'double-ended-queue';
|
||||
|
||||
import { FileEntry, GitStatusEntry, UnmergedEntrySummary } from './models/status';
|
||||
|
||||
type StatusItem = IStatusHeader | IStatusEntry;
|
||||
|
||||
export interface IStatusHeader {
|
||||
readonly kind: 'header';
|
||||
readonly value: string;
|
||||
}
|
||||
|
||||
/** A representation of a parsed status entry from git status */
|
||||
export interface IStatusEntry {
|
||||
readonly kind: 'entry';
|
||||
|
||||
/** The path to the file relative to the repository root */
|
||||
readonly path: string;
|
||||
|
||||
/** The two character long status code */
|
||||
readonly statusCode: string;
|
||||
|
||||
/** The original path in the case of a renamed file */
|
||||
readonly oldPath?: string;
|
||||
}
|
||||
|
||||
export function isStatusHeader(statusItem: StatusItem): statusItem is IStatusHeader {
|
||||
return statusItem.kind === 'header';
|
||||
}
|
||||
|
||||
export function isStatusEntry(statusItem: StatusItem): statusItem is IStatusEntry {
|
||||
return statusItem.kind === 'entry';
|
||||
}
|
||||
|
||||
const ChangedEntryType = '1';
|
||||
const RenamedOrCopiedEntryType = '2';
|
||||
const UnmergedEntryType = 'u';
|
||||
const UntrackedEntryType = '?';
|
||||
const IgnoredEntryType = '!';
|
||||
|
||||
/** Parses output from git status --porcelain -z into file status entries */
|
||||
export function parsePorcelainStatus(output: string): ReadonlyArray<StatusItem> {
|
||||
const entries = new Array<StatusItem>();
|
||||
|
||||
// See https://git-scm.com/docs/git-status
|
||||
//
|
||||
// In the short-format, the status of each path is shown as
|
||||
// XY PATH1 -> PATH2
|
||||
//
|
||||
// There is also an alternate -z format recommended for machine parsing. In that
|
||||
// format, the status field is the same, but some other things change. First,
|
||||
// the -> is omitted from rename entries and the field order is reversed (e.g
|
||||
// from -> to becomes to from). Second, a NUL (ASCII 0) follows each filename,
|
||||
// replacing space as a field separator and the terminating newline (but a space
|
||||
// still separates the status field from the first filename). Third, filenames
|
||||
// containing special characters are not specially formatted; no quoting or
|
||||
// backslash-escaping is performed.
|
||||
|
||||
const tokens = output.split('\0');
|
||||
const queue = new Deque(tokens);
|
||||
|
||||
let field: string | undefined;
|
||||
|
||||
while ((field = queue.shift())) {
|
||||
if (field.startsWith('# ') && field.length > 2) {
|
||||
entries.push({ kind: 'header', value: field.substr(2) });
|
||||
continue;
|
||||
}
|
||||
|
||||
const entryKind = field.substr(0, 1);
|
||||
|
||||
if (entryKind === ChangedEntryType) {
|
||||
entries.push(parseChangedEntry(field));
|
||||
} else if (entryKind === RenamedOrCopiedEntryType) {
|
||||
entries.push(parsedRenamedOrCopiedEntry(field, queue.shift()));
|
||||
} else if (entryKind === UnmergedEntryType) {
|
||||
entries.push(parseUnmergedEntry(field));
|
||||
} else if (entryKind === UntrackedEntryType) {
|
||||
entries.push(parseUntrackedEntry(field));
|
||||
} else if (entryKind === IgnoredEntryType) {
|
||||
// Ignored, we don't care about these for now
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
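// Illustrative example of the NUL-separated tokens this parser consumes, one token per
// line (shas shortened, paths made up). Note that in `-z` mode a renamed entry's original
// path arrives as the following token, which is why the queue is shifted once more above:
//
//   "# branch.head main"
//   "1 .M N... 100644 100644 100644 e69de29 e69de29 src/index.ts"
//   "2 R. N... 100644 100644 100644 abc1234 abc1234 R100 new-name.ts"   (next token: "old-name.ts")
//   "? newfile.txt"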
|
||||
|
||||
// 1 <XY> <sub> <mH> <mI> <mW> <hH> <hI> <path>
|
||||
const changedEntryRe =
|
||||
/^1 ([MADRCUTX?!.]{2}) (N\.\.\.|S[C.][M.][U.]) (\d+) (\d+) (\d+) ([a-f0-9]+) ([a-f0-9]+) ([\s\S]*?)$/;
|
||||
|
||||
function parseChangedEntry(field: string): IStatusEntry {
|
||||
const match = changedEntryRe.exec(field);
|
||||
|
||||
if (!match) {
|
||||
console.debug(`parseChangedEntry parse error: ${field}`);
|
||||
throw new Error(`Failed to parse status line for changed entry`);
|
||||
}
|
||||
|
||||
return {
|
||||
kind: 'entry',
|
||||
statusCode: match[1],
|
||||
path: match[8]
|
||||
};
|
||||
}
|
||||
|
||||
// 2 <XY> <sub> <mH> <mI> <mW> <hH> <hI> <X><score> <path><sep><origPath>
|
||||
const renamedOrCopiedEntryRe =
|
||||
/^2 ([MADRCUTX?!.]{2}) (N\.\.\.|S[C.][M.][U.]) (\d+) (\d+) (\d+) ([a-f0-9]+) ([a-f0-9]+) ([RC]\d+) ([\s\S]*?)$/;
|
||||
|
||||
function parsedRenamedOrCopiedEntry(field: string, oldPath: string | undefined): IStatusEntry {
|
||||
const match = renamedOrCopiedEntryRe.exec(field);
|
||||
|
||||
if (!match) {
|
||||
console.debug(`parsedRenamedOrCopiedEntry parse error: ${field}`);
|
||||
throw new Error(`Failed to parse status line for renamed or copied entry`);
|
||||
}
|
||||
|
||||
if (!oldPath) {
|
||||
throw new Error('Failed to parse renamed or copied entry, could not parse old path');
|
||||
}
|
||||
|
||||
return {
|
||||
kind: 'entry',
|
||||
statusCode: match[1],
|
||||
oldPath,
|
||||
path: match[9]
|
||||
};
|
||||
}
|
||||
|
||||
// u <xy> <sub> <m1> <m2> <m3> <mW> <h1> <h2> <h3> <path>
|
||||
const unmergedEntryRe =
|
||||
/^u ([DAU]{2}) (N\.\.\.|S[C.][M.][U.]) (\d+) (\d+) (\d+) (\d+) ([a-f0-9]+) ([a-f0-9]+) ([a-f0-9]+) ([\s\S]*?)$/;
|
||||
|
||||
function parseUnmergedEntry(field: string): IStatusEntry {
|
||||
const match = unmergedEntryRe.exec(field);
|
||||
|
||||
if (!match) {
|
||||
console.debug(`parseUnmergedEntry parse error: ${field}`);
|
||||
throw new Error(`Failed to parse status line for unmerged entry`);
|
||||
}
|
||||
|
||||
return {
|
||||
kind: 'entry',
|
||||
statusCode: match[1],
|
||||
path: match[10]
|
||||
};
|
||||
}
|
||||
|
||||
function parseUntrackedEntry(field: string): IStatusEntry {
|
||||
const path = field.substr(2);
|
||||
return {
|
||||
kind: 'entry',
|
||||
// NOTE: We return ?? instead of ? here to play nice with mapStatus,
|
||||
// might want to consider changing this (and mapStatus) in the future.
|
||||
statusCode: '??',
|
||||
path
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Map the raw status text from Git to a structure we can work with in the app.
|
||||
*/
|
||||
export function mapStatus(status: string): FileEntry {
|
||||
if (status === '??') {
|
||||
return {
|
||||
kind: 'untracked'
|
||||
};
|
||||
}
|
||||
|
||||
if (status === '.M') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'modified',
|
||||
index: GitStatusEntry.Unchanged,
|
||||
workingTree: GitStatusEntry.Modified
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'M.') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'modified',
|
||||
index: GitStatusEntry.Modified,
|
||||
workingTree: GitStatusEntry.Unchanged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === '.A') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'added',
|
||||
index: GitStatusEntry.Unchanged,
|
||||
workingTree: GitStatusEntry.Added
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'A.') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'added',
|
||||
index: GitStatusEntry.Added,
|
||||
workingTree: GitStatusEntry.Unchanged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === '.D') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'deleted',
|
||||
index: GitStatusEntry.Unchanged,
|
||||
workingTree: GitStatusEntry.Deleted
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'D.') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'deleted',
|
||||
index: GitStatusEntry.Deleted,
|
||||
workingTree: GitStatusEntry.Unchanged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'R.') {
|
||||
return {
|
||||
kind: 'renamed',
|
||||
index: GitStatusEntry.Renamed,
|
||||
workingTree: GitStatusEntry.Unchanged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === '.R') {
|
||||
return {
|
||||
kind: 'renamed',
|
||||
index: GitStatusEntry.Unchanged,
|
||||
workingTree: GitStatusEntry.Renamed
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'C.') {
|
||||
return {
|
||||
kind: 'copied',
|
||||
index: GitStatusEntry.Copied,
|
||||
workingTree: GitStatusEntry.Unchanged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === '.C') {
|
||||
return {
|
||||
kind: 'copied',
|
||||
index: GitStatusEntry.Unchanged,
|
||||
workingTree: GitStatusEntry.Copied
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'AD') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'added',
|
||||
index: GitStatusEntry.Added,
|
||||
workingTree: GitStatusEntry.Deleted
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'AM') {
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'added',
|
||||
index: GitStatusEntry.Added,
|
||||
workingTree: GitStatusEntry.Modified
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'RM') {
|
||||
return {
|
||||
kind: 'renamed',
|
||||
index: GitStatusEntry.Renamed,
|
||||
workingTree: GitStatusEntry.Modified
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'RD') {
|
||||
return {
|
||||
kind: 'renamed',
|
||||
index: GitStatusEntry.Renamed,
|
||||
workingTree: GitStatusEntry.Deleted
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'DD') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.BothDeleted,
|
||||
us: GitStatusEntry.Deleted,
|
||||
them: GitStatusEntry.Deleted
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'AU') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.AddedByUs,
|
||||
us: GitStatusEntry.Added,
|
||||
them: GitStatusEntry.UpdatedButUnmerged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'UD') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.DeletedByThem,
|
||||
us: GitStatusEntry.UpdatedButUnmerged,
|
||||
them: GitStatusEntry.Deleted
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'UA') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.AddedByThem,
|
||||
us: GitStatusEntry.UpdatedButUnmerged,
|
||||
them: GitStatusEntry.Added
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'DU') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.DeletedByUs,
|
||||
us: GitStatusEntry.Deleted,
|
||||
them: GitStatusEntry.UpdatedButUnmerged
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'AA') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.BothAdded,
|
||||
us: GitStatusEntry.Added,
|
||||
them: GitStatusEntry.Added
|
||||
};
|
||||
}
|
||||
|
||||
if (status === 'UU') {
|
||||
return {
|
||||
kind: 'conflicted',
|
||||
action: UnmergedEntrySummary.BothModified,
|
||||
us: GitStatusEntry.UpdatedButUnmerged,
|
||||
them: GitStatusEntry.UpdatedButUnmerged
|
||||
};
|
||||
}
|
||||
|
||||
// as a fallback, we assume the file is modified in some way
|
||||
return {
|
||||
kind: 'ordinary',
|
||||
type: 'modified'
|
||||
};
|
||||
}
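// Illustrative examples (status codes as emitted by porcelain v2):
//
//   mapStatus('??') // -> { kind: 'untracked' }
//   mapStatus('.M') // -> ordinary/modified: unchanged in the index, modified in the working tree
//   mapStatus('UU') // -> conflicted: both sides modified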
|
||||
355
packages/noodl-git/src/core/status.ts
Normal file
@@ -0,0 +1,355 @@
|
||||
import {
|
||||
AppFileStatus,
|
||||
FileStatusKind,
|
||||
GitStatusEntry,
|
||||
ConflictedFileStatus,
|
||||
UnmergedEntrySummary,
|
||||
WorkingDirectoryStatus,
|
||||
WorkingDirectoryFileChange,
|
||||
FileEntry
|
||||
} from './models/status';
|
||||
|
||||
import {
|
||||
parsePorcelainStatus,
|
||||
mapStatus,
|
||||
IStatusEntry,
|
||||
IStatusHeader,
|
||||
isStatusHeader,
|
||||
isStatusEntry
|
||||
} from './status-parser';
|
||||
|
||||
import { DiffSelectionType, DiffSelection } from './models/diff';
|
||||
import { git } from './client';
|
||||
import { isMergeHeadSet } from './merge';
|
||||
import { getRebaseInternalState, RebaseInternalState } from './rebase';
|
||||
import { getFilesWithConflictMarkers } from './diff-check';
|
||||
import { getBinaryPaths } from './diff';
|
||||
|
||||
/**
|
||||
* V8 has a limit on the size of string it can create (~256MB), and unless we want to
|
||||
* trigger an unhandled exception we need to do the encoding conversion by hand.
|
||||
*
|
||||
* As we may be executing status often, we should keep this to a reasonable threshold.
|
||||
*/
|
||||
const MaxStatusBufferSize = 20e6; // 20MB in decimal
|
||||
|
||||
/** The encapsulation of the result from 'git status' */
|
||||
export interface IStatusResult {
|
||||
/** The name of the current branch */
|
||||
readonly currentBranch?: string;
|
||||
|
||||
/** The name of the current upstream branch */
|
||||
readonly currentUpstreamBranch?: string;
|
||||
|
||||
/** The SHA of the tip commit of the current branch */
|
||||
readonly currentTip?: string;
|
||||
|
||||
/** How many commits ahead and behind
|
||||
* the `currentBranch` is compared to the `currentUpstreamBranch`
|
||||
*/
|
||||
readonly branchAheadBehind?: any; // IAheadBehind
|
||||
|
||||
/** true if the repository exists at the given location */
|
||||
readonly exists: boolean;
|
||||
|
||||
/** true if repository is in a conflicted state */
|
||||
readonly mergeHeadFound: boolean;
|
||||
|
||||
/** true if a merge --squash operation has been started */
|
||||
readonly squashMsgFound: boolean;
|
||||
|
||||
/** details about the rebase operation, if found */
|
||||
readonly rebaseInternalState: any; // RebaseInternalState | null
|
||||
|
||||
/** true if repository is in cherry pick state */
|
||||
readonly isCherryPickingHeadFound: boolean;
|
||||
|
||||
/** the status of the files in the repository's working directory */
|
||||
readonly workingDirectory: WorkingDirectoryStatus;
|
||||
|
||||
/** whether conflicting files are present in the repository */
|
||||
readonly doConflictedFilesExist: boolean;
|
||||
}
|
||||
|
||||
interface IStatusHeadersData {
|
||||
currentBranch?: string;
|
||||
currentUpstreamBranch?: string;
|
||||
currentTip?: string;
|
||||
branchAheadBehind?: any; // IAheadBehind
|
||||
match: RegExpMatchArray | null;
|
||||
}
|
||||
|
||||
type ConflictFilesDetails = {
|
||||
conflictCountsByPath: ReadonlyMap<string, number>;
|
||||
binaryFilePaths: ReadonlyArray<string>;
|
||||
};
|
||||
|
||||
function parseConflictedState(
|
||||
entry: any, // UnmergedEntry,
|
||||
path: string,
|
||||
conflictDetails: ConflictFilesDetails
|
||||
): ConflictedFileStatus {
|
||||
switch (entry.action) {
|
||||
case UnmergedEntrySummary.BothAdded: {
|
||||
const isBinary = conflictDetails.binaryFilePaths.includes(path);
|
||||
if (!isBinary) {
|
||||
return {
|
||||
kind: FileStatusKind.Conflicted,
|
||||
entry,
|
||||
conflictMarkerCount: conflictDetails.conflictCountsByPath.get(path) || 0
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
kind: FileStatusKind.Conflicted,
|
||||
entry
|
||||
};
|
||||
}
|
||||
}
|
||||
case UnmergedEntrySummary.BothModified: {
|
||||
const isBinary = conflictDetails.binaryFilePaths.includes(path);
|
||||
if (!isBinary) {
|
||||
return {
|
||||
kind: FileStatusKind.Conflicted,
|
||||
entry,
|
||||
conflictMarkerCount: conflictDetails.conflictCountsByPath.get(path) || 0
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
kind: FileStatusKind.Conflicted,
|
||||
entry
|
||||
};
|
||||
}
|
||||
}
|
||||
default:
|
||||
return {
|
||||
kind: FileStatusKind.Conflicted,
|
||||
entry
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function convertToAppStatus(
|
||||
path: string,
|
||||
entry: FileEntry,
|
||||
conflictDetails: ConflictFilesDetails,
|
||||
oldPath?: string
|
||||
): AppFileStatus {
|
||||
if (entry.kind === 'ordinary') {
|
||||
switch (entry.type) {
|
||||
case 'added':
|
||||
return { kind: FileStatusKind.New };
|
||||
case 'modified':
|
||||
return { kind: FileStatusKind.Modified };
|
||||
case 'deleted':
|
||||
return { kind: FileStatusKind.Deleted };
|
||||
}
|
||||
} else if (entry.kind === 'copied' && oldPath != null) {
|
||||
return { kind: FileStatusKind.Copied, oldPath };
|
||||
} else if (entry.kind === 'renamed' && oldPath != null) {
|
||||
return { kind: FileStatusKind.Renamed, oldPath };
|
||||
} else if (entry.kind === 'untracked') {
|
||||
return { kind: FileStatusKind.Untracked };
|
||||
} else if (entry.kind === 'conflicted') {
|
||||
return parseConflictedState(entry, path, conflictDetails);
|
||||
}
|
||||
|
||||
throw new Error(`Unknown file status ${entry.kind}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update status header based on the current header entry.
|
||||
* Reducer.
|
||||
*/
|
||||
function parseStatusHeader(results: IStatusHeadersData, header: IStatusHeader) {
|
||||
let { currentBranch, currentUpstreamBranch, currentTip, branchAheadBehind, match } = results;
|
||||
const value = header.value;
|
||||
|
||||
// This intentionally does not match "branch.oid (initial)" (an unborn branch)
|
||||
if ((match = value.match(/^branch\.oid ([a-f0-9]+)$/))) {
|
||||
currentTip = match[1];
|
||||
} else if ((match = value.match(/^branch.head (.*)/))) {
|
||||
if (match[1] !== '(detached)') {
|
||||
currentBranch = match[1];
|
||||
}
|
||||
} else if ((match = value.match(/^branch.upstream (.*)/))) {
|
||||
currentUpstreamBranch = match[1];
|
||||
} else if ((match = value.match(/^branch.ab \+(\d+) -(\d+)$/))) {
|
||||
const ahead = parseInt(match[1], 10);
|
||||
const behind = parseInt(match[2], 10);
|
||||
|
||||
if (!isNaN(ahead) && !isNaN(behind)) {
|
||||
branchAheadBehind = { ahead, behind };
|
||||
}
|
||||
}
|
||||
return {
|
||||
currentBranch,
|
||||
currentUpstreamBranch,
|
||||
currentTip,
|
||||
branchAheadBehind,
|
||||
match
|
||||
};
|
||||
}
|
||||
|
||||
// List of known conflicted index entries for a file, extracted from mapStatus
|
||||
// inside `app/src/lib/status-parser.ts` for convenience
|
||||
const conflictStatusCodes = ['DD', 'AU', 'UD', 'UA', 'DU', 'AA', 'UU'];
|
||||
|
||||
/**
|
||||
* Retrieve the status for a given repository,
|
||||
* and fail gracefully if the location is not a Git repository
|
||||
*/
|
||||
export async function getStatus(repositoryDir: string): Promise<WorkingDirectoryStatus | null> {
|
||||
const args = ['--no-optional-locks', 'status', '--untracked-files=all', '--branch', '--porcelain=2', '-z'];
|
||||
|
||||
const result = await git(args, repositoryDir, 'getStatus', {
|
||||
successExitCodes: new Set([0, 128])
|
||||
});
|
||||
|
||||
if (result.exitCode === 128) {
|
||||
console.debug(`'git status' returned 128 for '${repositoryDir}' and is likely missing its .git directory`);
|
||||
return null;
|
||||
}
|
||||
|
||||
if (result.output.length > MaxStatusBufferSize) {
|
||||
console.error(
|
||||
`'git status' emitted ${result.output.length} bytes, which is beyond the supported threshold of ${MaxStatusBufferSize} bytes`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const stdout = result.output;
|
||||
const parsed = parsePorcelainStatus(stdout);
|
||||
// const headers = parsed.filter(isStatusHeader)
|
||||
const entries = parsed.filter(isStatusEntry);
|
||||
|
||||
const mergeHeadFound = await isMergeHeadSet(repositoryDir);
|
||||
const conflictedFilesInIndex = entries.some((e) => conflictStatusCodes.indexOf(e.statusCode) > -1);
|
||||
const rebaseInternalState = await getRebaseInternalState(repositoryDir);
|
||||
|
||||
const conflictDetails = await getConflictDetails(
|
||||
repositoryDir,
|
||||
mergeHeadFound,
|
||||
conflictedFilesInIndex,
|
||||
rebaseInternalState
|
||||
);
|
||||
|
||||
// Map of files keyed on their paths.
|
||||
const files = entries.reduce(
|
||||
(files, entry) => buildStatusMap(files, entry, conflictDetails),
|
||||
new Map<string, WorkingDirectoryFileChange>()
|
||||
);
|
||||
|
||||
return WorkingDirectoryStatus.fromFiles([...files.values()]);
|
||||
}
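// Illustrative usage sketch (the repository path is a placeholder). Note that despite the
// IStatusResult interface defined above, this particular implementation returns only the
// WorkingDirectoryStatus, or null when the directory is not a git repository:
//
//   const workingDirectory = await getStatus("/path/to/repo");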
|
||||
|
||||
/**
|
||||
*
|
||||
* Update map of working directory changes with a file status entry.
|
||||
* Reducer(ish).
|
||||
*
|
||||
* (Map is used here to maintain insertion order.)
|
||||
*/
|
||||
function buildStatusMap(
|
||||
files: Map<string, WorkingDirectoryFileChange>,
|
||||
entry: IStatusEntry,
|
||||
conflictDetails: ConflictFilesDetails
|
||||
): Map<string, WorkingDirectoryFileChange> {
|
||||
const status = mapStatus(entry.statusCode);
|
||||
|
||||
if (status.kind === 'ordinary') {
|
||||
// when a file is added in the index but then removed in the working
|
||||
// directory, the file won't be part of the commit, so we can skip
|
||||
// displaying this entry in the changes list
|
||||
if (status.index === GitStatusEntry.Added && status.workingTree === GitStatusEntry.Deleted) {
|
||||
return files;
|
||||
}
|
||||
}
|
||||
|
||||
if (status.kind === 'untracked') {
|
||||
// when a delete has been staged, but an untracked file exists with the
|
||||
// same path, we should ensure that we only draw one entry in the
|
||||
// changes list - see if an entry already exists for this path and
|
||||
// remove it if found
|
||||
files.delete(entry.path);
|
||||
}
|
||||
|
||||
// for now we just poke at the existing summary
|
||||
const appStatus = convertToAppStatus(entry.path, status, conflictDetails, entry.oldPath);
|
||||
|
||||
const selection = DiffSelection.fromInitialSelection(DiffSelectionType.All);
|
||||
|
||||
files.set(entry.path, new WorkingDirectoryFileChange(entry.path, appStatus, selection));
|
||||
return files;
|
||||
}
|
||||
|
||||
async function getMergeConflictDetails(repositoryDir: string) {
|
||||
const conflictCountsByPath = await getFilesWithConflictMarkers(repositoryDir);
|
||||
const binaryFilePaths = await getBinaryPaths(repositoryDir, 'MERGE_HEAD');
|
||||
return {
|
||||
conflictCountsByPath,
|
||||
binaryFilePaths
|
||||
};
|
||||
}
|
||||
|
||||
async function getRebaseConflictDetails(repositoryDir: string) {
|
||||
const conflictCountsByPath = await getFilesWithConflictMarkers(repositoryDir);
|
||||
const binaryFilePaths = await getBinaryPaths(repositoryDir, 'REBASE_HEAD');
|
||||
return {
|
||||
conflictCountsByPath,
|
||||
binaryFilePaths
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* We need to do these operations to detect conflicts that were the result
|
||||
* of popping a stash into the index
|
||||
*/
|
||||
async function getWorkingDirectoryConflictDetails(repositoryDir: string) {
|
||||
const conflictCountsByPath = await getFilesWithConflictMarkers(repositoryDir);
|
||||
let binaryFilePaths: ReadonlyArray<string> = [];
|
||||
try {
|
||||
// it's totally fine if HEAD doesn't exist, in which case getBinaryPaths throws an error
|
||||
binaryFilePaths = await getBinaryPaths(repositoryDir, 'HEAD');
|
||||
} catch (error) {}
|
||||
|
||||
return {
|
||||
conflictCountsByPath,
|
||||
binaryFilePaths
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* gets the conflicted files count and binary file paths in a given repository.
|
||||
* for computing an `IStatusResult`.
|
||||
*
|
||||
* @param repositoryDir to get details from
|
||||
* @param mergeHeadFound whether a merge conflict has been detected
|
||||
* @param lookForStashConflicts whether it looks like a stash has introduced conflicts
|
||||
* @param rebaseInternalState details about the current rebase operation (if found)
|
||||
*/
|
||||
async function getConflictDetails(
|
||||
repositoryDir: string,
|
||||
mergeHeadFound: boolean,
|
||||
lookForStashConflicts: boolean,
|
||||
rebaseInternalState: RebaseInternalState | null
|
||||
): Promise<ConflictFilesDetails> {
|
||||
try {
|
||||
if (mergeHeadFound) {
|
||||
return await getMergeConflictDetails(repositoryDir);
|
||||
}
|
||||
|
||||
if (rebaseInternalState !== null) {
|
||||
return await getRebaseConflictDetails(repositoryDir);
|
||||
}
|
||||
|
||||
if (lookForStashConflicts) {
|
||||
return await getWorkingDirectoryConflictDetails(repositoryDir);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Unexpected error from git operations in getConflictDetails', error);
|
||||
}
|
||||
return {
|
||||
conflictCountsByPath: new Map<string, number>(),
|
||||
binaryFilePaths: new Array<string>()
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
/**
|
||||
 * Send a caught (i.e. non-fatal) exception to the non-fatal error bucket
|
||||
*
|
||||
* The intended use of this message is for getting insight into areas of the
|
||||
* code where we suspect alternate failure modes other than those accounted for.
|
||||
*
|
||||
* Example: In the Desktop tutorial creation logic we handle all errors and our
|
||||
* initial belief was that the only two failure modes we would have to account
|
||||
* for were either the repo existing on disk or on the user's account. We now
|
||||
* suspect that there might be other reasons why the creation logic is failing
|
||||
* and therefore want to send all errors encountered during creation to central
|
||||
* where we can determine if there are additional failure modes for us to
|
||||
* consider.
|
||||
*
|
||||
* @param kind - a grouping key that allows us to group all errors originating
|
||||
* in the same area of the code base or relating to the same kind of failure
|
||||
* (recommend a single non-hyphenated word) Example: tutorialRepoCreation
|
||||
*
|
||||
* @param error - the caught error
|
||||
*/
|
||||
|
||||
let lastNonFatalException: number | undefined = undefined;
|
||||
|
||||
/** Max one non-fatal exception per minute */
|
||||
const minIntervalBetweenNonFatalExceptions = 60 * 1000;
|
||||
|
||||
export function sendNonFatalException(kind: string, error: Error) {
|
||||
const now = Date.now();
|
||||
|
||||
if (lastNonFatalException !== undefined && now - lastNonFatalException < minIntervalBetweenNonFatalExceptions) {
|
||||
return;
|
||||
}
|
||||
|
||||
lastNonFatalException = now;
|
||||
// NOTE: Update BugTracker with more functions to support this there instead
|
||||
console.error(kind, error);
|
||||
}
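// Illustrative usage sketch: callers pass a short grouping key plus the caught error.
// At most one exception per minute is forwarded; the rest are silently dropped.
// `riskyGitOperation` is a hypothetical caller-side function, not part of this package.
//
//   try {
//     await riskyGitOperation();
//   } catch (e) {
//     sendNonFatalException("stashParsing", e as Error);
//   }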
|
||||
@@ -0,0 +1,93 @@
|
||||
import { TrampolineCommandHandler } from './trampoline-command';
|
||||
|
||||
export type RequestGitAccountFuncReturn = { username: string; password: string };
|
||||
export type RequestGitAccountFunc = (endpoint: string) => Promise<RequestGitAccountFuncReturn>;
|
||||
|
||||
let requestGitAccount: RequestGitAccountFunc | undefined = undefined;
|
||||
|
||||
export function setRequestGitAccount(func: RequestGitAccountFunc) {
|
||||
clearCredentialsCache();
|
||||
requestGitAccount = func;
|
||||
}
|
||||
|
||||
export function clearCredentialsCache() {
|
||||
accounts.clear();
|
||||
}
|
||||
|
||||
type AccountCache = RequestGitAccountFuncReturn & { updatedAt: number };
|
||||
const accounts = new Map<string, AccountCache>();
|
||||
|
||||
const ACCOUNT_CACHE_TTL = 5 * 60 * 1000;
|
||||
|
||||
export async function fetchAccount(endpoint: string, token: string) {
|
||||
const now = Date.now();
|
||||
|
||||
if (accounts.has(endpoint)) {
|
||||
const account = accounts.get(endpoint);
|
||||
|
||||
// Check if it has expired
|
||||
const expired = account.updatedAt + ACCOUNT_CACHE_TTL < now;
|
||||
if (!expired) {
|
||||
return account;
|
||||
}
|
||||
}
|
||||
|
||||
if (!requestGitAccount) {
|
||||
throw new Error('Git Authentication is not configured');
|
||||
}
|
||||
|
||||
// Fetch the account
|
||||
let account;
|
||||
try {
|
||||
account = await requestGitAccount(endpoint);
|
||||
} catch (e) {
|
||||
console.log('Failed to get git auth', e);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Create cache entry
|
||||
const entry: AccountCache = { ...account, updatedAt: now };
|
||||
accounts.set(endpoint, entry);
|
||||
|
||||
return entry;
|
||||
}
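// Illustrative flow (the endpoint is a placeholder): the first call for an endpoint goes
// through requestGitAccount, and calls within the next five minutes are served from the
// in-memory cache. Note that the `token` parameter is currently unused by this function.
//
//   const account = await fetchAccount("https://example.com/repo.git", trampolineToken);
//   // -> { username, password, updatedAt }, or undefined if requesting credentials failed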
|
||||
|
||||
const endpointRegex = /\/\/(\w+@?(.+))'/;
|
||||
|
||||
export const askpassTrampolineHandler: TrampolineCommandHandler = async (command) => {
|
||||
if (command.parameters.length !== 1) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Example:
|
||||
// "Username for 'https://ngit1.noodlapp.com': "
|
||||
// "Password for 'https://a@ngit1.noodlapp.com': "
|
||||
|
||||
const firstParameter = command.parameters[0];
|
||||
const askUsername = firstParameter.startsWith('Username');
|
||||
const askPassword = firstParameter.startsWith('Password');
|
||||
|
||||
if (askUsername || askPassword) {
|
||||
const endpointMatch = firstParameter.match(endpointRegex);
|
||||
if (endpointMatch === null || endpointMatch.length !== 3) {
|
||||
throw new Error('Unable to read the git endpoint.');
|
||||
}
|
||||
|
||||
const endpoint = endpointMatch[1];
|
||||
|
||||
// Fetch the account for this endpoint
|
||||
const account = await fetchAccount(endpoint, command.trampolineToken);
|
||||
if (!account) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Return the username or password based on request
|
||||
if (askUsername) {
|
||||
return account?.username;
|
||||
} else if (askPassword) {
|
||||
return account?.password;
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
};
|
||||
@@ -0,0 +1,152 @@
|
||||
import { sendNonFatalException } from './non-fatal-exception';
|
||||
import { ITrampolineCommand, TrampolineCommandIdentifier } from './trampoline-command';
|
||||
|
||||
/**
|
||||
* Parse a string into the given (string) enum type. Returns undefined if the
|
||||
* enum type provided did not match any of the keys in the enum.
|
||||
*/
|
||||
export function parseEnumValue<T extends string>(enumObj: Record<string, T>, value: string): T | undefined {
|
||||
return Object.values(enumObj).find((v) => v === value);
|
||||
}
|
||||
|
||||
enum TrampolineCommandParserState {
|
||||
ParameterCount,
|
||||
Parameters,
|
||||
EnvironmentVariablesCount,
|
||||
EnvironmentVariables,
|
||||
Finished
|
||||
}
|
||||
|
||||
/**
|
||||
* The purpose of this class is to process the data received from the trampoline
|
||||
* client and build a command from it.
|
||||
*/
|
||||
export class TrampolineCommandParser {
|
||||
private parameterCount: number = 0;
|
||||
private readonly parameters: string[] = [];
|
||||
private environmentVariablesCount: number = 0;
|
||||
private readonly environmentVariables = new Map<string, string>();
|
||||
|
||||
private state: TrampolineCommandParserState = TrampolineCommandParserState.ParameterCount;
|
||||
|
||||
/** Whether or not it has finished parsing the command. */
|
||||
public hasFinished() {
|
||||
return this.state === TrampolineCommandParserState.Finished;
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a chunk of data and processes it depending on the current state.
|
||||
*
|
||||
* Throws an error if it's invoked after the parser has finished, or if
|
||||
* anything unexpected is received.
|
||||
**/
|
||||
public processValue(value: string) {
|
||||
switch (this.state) {
|
||||
case TrampolineCommandParserState.ParameterCount:
|
||||
this.parameterCount = parseInt(value);
|
||||
|
||||
if (this.parameterCount > 0) {
|
||||
this.state = TrampolineCommandParserState.Parameters;
|
||||
} else {
|
||||
this.state = TrampolineCommandParserState.EnvironmentVariablesCount;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case TrampolineCommandParserState.Parameters:
|
||||
this.parameters.push(value);
|
||||
if (this.parameters.length === this.parameterCount) {
|
||||
this.state = TrampolineCommandParserState.EnvironmentVariablesCount;
|
||||
}
|
||||
break;
|
||||
|
||||
case TrampolineCommandParserState.EnvironmentVariablesCount:
|
||||
this.environmentVariablesCount = parseInt(value);
|
||||
|
||||
if (this.environmentVariablesCount > 0) {
|
||||
this.state = TrampolineCommandParserState.EnvironmentVariables;
|
||||
} else {
|
||||
this.state = TrampolineCommandParserState.Finished;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case TrampolineCommandParserState.EnvironmentVariables:
|
||||
// Split after the first '='
|
||||
const match = /([^=]+)=(.*)/.exec(value);
|
||||
|
||||
if (
|
||||
match === null ||
|
||||
// Length must be 3: the 2 groups + the whole string
|
||||
match.length !== 3
|
||||
) {
|
||||
throw new Error(`Unexpected environment variable format: ${value}`);
|
||||
}
|
||||
|
||||
const variableKey = match[1];
|
||||
const variableValue = match[2];
|
||||
|
||||
this.environmentVariables.set(variableKey, variableValue);
|
||||
|
||||
if (this.environmentVariables.size === this.environmentVariablesCount) {
|
||||
this.state = TrampolineCommandParserState.Finished;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`Received value during invalid state: ${this.state}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a command.
|
||||
*
|
||||
* It will return null if the parser hasn't finished yet, or if the identifier
|
||||
* is missing or invalid.
|
||||
**/
|
||||
public toCommand(): ITrampolineCommand | null {
|
||||
if (this.hasFinished() === false) {
|
||||
const error = new Error('The command cannot be generated if parsing is not finished');
|
||||
this.logCommandCreationError(error);
|
||||
return null;
|
||||
}
|
||||
|
||||
const identifierString = this.environmentVariables.get('DESKTOP_TRAMPOLINE_IDENTIFIER');
|
||||
|
||||
if (identifierString === undefined) {
|
||||
const error = new Error(
|
||||
`The command identifier is missing. Env variables received: ${Array.from(this.environmentVariables.keys())}`
|
||||
);
|
||||
this.logCommandCreationError(error);
|
||||
return null;
|
||||
}
|
||||
|
||||
const identifier = parseEnumValue(TrampolineCommandIdentifier, identifierString);
|
||||
|
||||
if (identifier === undefined) {
|
||||
const error = new Error(`The command identifier ${identifierString} is not supported`);
|
||||
this.logCommandCreationError(error);
|
||||
return null;
|
||||
}
|
||||
|
||||
const trampolineToken = this.environmentVariables.get('DESKTOP_TRAMPOLINE_TOKEN');
|
||||
|
||||
if (trampolineToken === undefined) {
|
||||
const error = new Error(`The trampoline token is missing`);
|
||||
this.logCommandCreationError(error);
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
identifier,
|
||||
trampolineToken,
|
||||
parameters: this.parameters,
|
||||
environmentVariables: this.environmentVariables
|
||||
};
|
||||
}
|
||||
|
||||
private logCommandCreationError(error: Error) {
|
||||
console.error('Error creating trampoline command:', error);
|
||||
sendNonFatalException('trampolineCommandParser', error);
|
||||
}
|
||||
}
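// Illustrative sequence of values for an ASKPASS invocation (values are made up, and
// <token> stands for a real trampoline token):
//
//   const parser = new TrampolineCommandParser();
//   parser.processValue("1");                                      // parameter count
//   parser.processValue("Username for 'https://example.com': ");   // the single parameter
//   parser.processValue("2");                                      // environment variable count
//   parser.processValue("DESKTOP_TRAMPOLINE_IDENTIFIER=ASKPASS");
//   parser.processValue("DESKTOP_TRAMPOLINE_TOKEN=<token>");
//   const command = parser.toCommand();                            // ITrampolineCommand | null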
|
||||
41
packages/noodl-git/src/core/trampoline/trampoline-command.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
export enum TrampolineCommandIdentifier {
|
||||
AskPass = 'ASKPASS'
|
||||
}
|
||||
|
||||
/** Represents a command in our trampoline mechanism. */
|
||||
export interface ITrampolineCommand {
|
||||
/**
|
||||
* Identifier of the command.
|
||||
*
|
||||
* This will be used to find a suitable handler in the app to react to the
|
||||
* command.
|
||||
*/
|
||||
readonly identifier: TrampolineCommandIdentifier;
|
||||
|
||||
/**
|
||||
* Trampoline token sent with this command via the DESKTOP_TRAMPOLINE_TOKEN
|
||||
* environment variable.
|
||||
*/
|
||||
readonly trampolineToken: string;
|
||||
|
||||
/**
|
||||
* Parameters of the command.
|
||||
*
|
||||
* This corresponds to the command line arguments (argv) except the name of
|
||||
* the program (argv[0]).
|
||||
*/
|
||||
readonly parameters: ReadonlyArray<string>;
|
||||
|
||||
/** Environment variables that were set when the command was invoked. */
|
||||
readonly environmentVariables: ReadonlyMap<string, string>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a handler function for a trampoline command.
|
||||
*
|
||||
* @param command The invoked trampoline command to handle.
|
||||
* @returns A string with the result of the command (which will be
|
||||
* printed via
|
||||
* stdout by the trampoline client), or undefined
|
||||
*/
|
||||
export type TrampolineCommandHandler = (command: ITrampolineCommand) => Promise<string | undefined>;
|
||||
@@ -0,0 +1,31 @@
|
||||
import { trampolineServer } from "./trampoline-server";
|
||||
import { withTrampolineToken } from "./trampoline-tokens";
|
||||
import { TrampolineCommandIdentifier } from "../trampoline/trampoline-command";
|
||||
import { getDesktopTrampolinePath } from "../../paths";
|
||||
|
||||
/**
|
||||
* Allows invoking a function with a set of environment variables to use when
|
||||
* invoking a Git subcommand that needs to use the trampoline (mainly git
|
||||
* operations requiring an askpass script) and with a token to use in the
|
||||
* trampoline server.
|
||||
*
|
||||
* @param fn Function to invoke with all the necessary environment
|
||||
* variables.
|
||||
*/
|
||||
export async function withTrampolineEnv<T>(
|
||||
fn: (env: Object) => Promise<T>
|
||||
): Promise<T> {
|
||||
return withTrampolineToken(async (token) => {
|
||||
const desktopTrampolinePath = getDesktopTrampolinePath();
|
||||
const desktopPort = await trampolineServer.getPort();
|
||||
|
||||
const result = await fn({
|
||||
GIT_ASKPASS: desktopTrampolinePath,
|
||||
DESKTOP_PORT: desktopPort,
|
||||
DESKTOP_TRAMPOLINE_TOKEN: token,
|
||||
DESKTOP_TRAMPOLINE_IDENTIFIER: TrampolineCommandIdentifier.AskPass,
|
||||
});
|
||||
|
||||
return result;
|
||||
});
|
||||
}
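// Illustrative usage sketch: the environment object passed to `fn` is meant to be merged
// into the environment of the spawned git process. Whether the `git` client accepts an
// `env` option as shown is an assumption of this sketch, not something this file guarantees;
// the repository path is a placeholder.
//
//   await withTrampolineEnv(async (env) => {
//     return git(["fetch", "origin"], "/path/to/repo", "fetch", { env });
//   });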
|
||||
176
packages/noodl-git/src/core/trampoline/trampoline-server.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
import { createServer, AddressInfo, Server, Socket } from 'net';
|
||||
import split2 from 'split2';
|
||||
import { sendNonFatalException } from './non-fatal-exception';
|
||||
import { askpassTrampolineHandler } from './trampoline-askpass-handler';
|
||||
import { ITrampolineCommand, TrampolineCommandHandler, TrampolineCommandIdentifier } from './trampoline-command';
|
||||
import { TrampolineCommandParser } from './trampoline-command-parser';
|
||||
import { isValidTrampolineToken } from './trampoline-tokens';
|
||||
|
||||
/**
|
||||
* This class represents the "trampoline server". The trampoline is something
|
||||
 * we'll hand to git in order to communicate with the App without git noticing. A
|
||||
* notable example of this would be GIT_ASKPASS.
|
||||
*
|
||||
* This server is designed so that it will start lazily when the app performs a
|
||||
* remote git operation. At that point, the app will try to retrieve the
|
||||
* server's port, which will run the server first if needed.
|
||||
*
|
||||
* The idea behind this is to simplify the retry approach in case of error:
|
||||
* instead of reacting to errors with an immediate retry, the server will remain
|
||||
* closed until the next time the app needs it (i.e. in the next git remote
|
||||
* operation).
|
||||
*/
|
||||
export class TrampolineServer {
|
||||
private readonly server: Server;
|
||||
private listeningPromise: Promise<void> | null = null;
|
||||
|
||||
private readonly commandHandlers = new Map<TrampolineCommandIdentifier, TrampolineCommandHandler>();
|
||||
|
||||
public constructor() {
|
||||
this.server = createServer((socket) => this.onNewConnection(socket));
|
||||
|
||||
// Make sure the server is always unref'ed, so it doesn't keep the app alive
|
||||
// for longer than needed. Not having this made the CI tasks on Windows
|
||||
// timeout because the unit tests completed in about 7min, but the test
|
||||
// suite runner would never finish, hitting a 45min timeout for the whole
|
||||
// GitHub Action.
|
||||
this.server.unref();
|
||||
|
||||
this.registerCommandHandler(TrampolineCommandIdentifier.AskPass, askpassTrampolineHandler);
|
||||
}
|
||||
|
||||
private async listen(): Promise<void> {
|
||||
this.listeningPromise = new Promise((resolve, reject) => {
|
||||
// Observe errors while trying to start the server
|
||||
this.server.on('error', (error) => {
|
||||
console.error(error);
|
||||
reject(error);
|
||||
this.close();
|
||||
});
|
||||
|
||||
this.server.listen(0, '127.0.0.1', async () => {
|
||||
// Replace the error handler
|
||||
this.server.removeAllListeners('error');
|
||||
this.server.on('error', this.onServerError);
|
||||
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
return this.listeningPromise;
|
||||
}
|
||||
|
||||
private async close() {
|
||||
// Make sure the server is not trying to start
|
||||
if (this.listeningPromise !== null) {
|
||||
await this.listeningPromise;
|
||||
}
|
||||
|
||||
// Reset the server, it will be restarted lazily the next time it's needed
|
||||
this.server.close();
|
||||
this.server.removeAllListeners('error');
|
||||
this.listeningPromise = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function will retrieve the port of the server, or null if the server
|
||||
* is not running.
|
||||
*
|
||||
* In order to get the server port, it might need to start the server if it's
|
||||
* not running already.
|
||||
*/
|
||||
public async getPort() {
|
||||
if (this.port !== null) {
|
||||
return this.port;
|
||||
}
|
||||
|
||||
if (this.listeningPromise !== null) {
|
||||
await this.listeningPromise;
|
||||
} else {
|
||||
await this.listen();
|
||||
}
|
||||
|
||||
return this.port;
|
||||
}
|
||||
|
||||
private get port(): number | null {
|
||||
const address = this.server.address() as AddressInfo;
|
||||
|
||||
if (address && address.port) {
|
||||
return address.port;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private onNewConnection(socket: Socket) {
|
||||
const parser = new TrampolineCommandParser();
|
||||
|
||||
// Messages coming from the trampoline client will be separated by \0
|
||||
socket.pipe(split2(/\0/)).on('data', (data: Buffer) => {
|
||||
this.onDataReceived(socket, parser, data);
|
||||
});
|
||||
|
||||
socket.on('error', this.onClientError);
|
||||
}
|
||||
|
||||
private onDataReceived(socket: Socket, parser: TrampolineCommandParser, data: Buffer) {
|
||||
const value = data.toString('utf8');
|
||||
parser.processValue(value);
|
||||
|
||||
if (!parser.hasFinished()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const command = parser.toCommand();
|
||||
if (command === null) {
|
||||
socket.end();
|
||||
return;
|
||||
}
|
||||
|
||||
this.processCommand(socket, command);
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a handler for commands with a specific identifier. This will be
|
||||
* invoked when the server receives a command with the given identifier.
|
||||
*
|
||||
* @param identifier Identifier of the command.
|
||||
* @param handler Handler to register.
|
||||
*/
|
||||
private registerCommandHandler(identifier: TrampolineCommandIdentifier, handler: TrampolineCommandHandler) {
|
||||
this.commandHandlers.set(identifier, handler);
|
||||
}
|
||||
|
||||
private async processCommand(socket: Socket, command: ITrampolineCommand) {
|
||||
if (!isValidTrampolineToken(command.trampolineToken)) {
|
||||
throw new Error('Tried to use invalid trampoline token');
|
||||
}
|
||||
|
||||
const handler = this.commandHandlers.get(command.identifier);
|
||||
|
||||
if (handler === undefined) {
|
||||
socket.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await handler(command);
|
||||
|
||||
if (result !== undefined) {
|
||||
socket.end(result);
|
||||
} else {
|
||||
socket.end();
|
||||
}
|
||||
}
|
||||
|
||||
private onServerError = (error: Error) => {
|
||||
sendNonFatalException('trampolineServer', error);
|
||||
this.close();
|
||||
};
|
||||
|
||||
private onClientError = (error: Error) => {
|
||||
console.error('Trampoline client error', error);
|
||||
};
|
||||
}
|
||||
|
||||
export const trampolineServer = new TrampolineServer();
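// --- Illustrative usage sketch, not part of the original commit ---
// Typical wiring: getPort() lazily starts the server, and the port plus a
// short-lived token (see trampoline-tokens.ts below) are handed to git as
// environment variables read by the trampoline executable. The variable names
// below are assumptions for illustration only.
import { withTrampolineToken } from './trampoline-tokens';

export function exampleWithTrampolineEnv<T>(
  fn: (env: Record<string, string>) => Promise<T>
): Promise<T> {
  return withTrampolineToken(async (token) => {
    const port = await trampolineServer.getPort();
    return fn({
      DESKTOP_PORT: String(port),
      DESKTOP_TRAMPOLINE_TOKEN: token
    });
  });
}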
|
||||
53
packages/noodl-git/src/core/trampoline/trampoline-tokens.ts
Normal file
53
packages/noodl-git/src/core/trampoline/trampoline-tokens.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
// TODO: Move to platform package
|
||||
function guid() {
|
||||
function s4() {
|
||||
return Math.floor((1 + Math.random()) * 0x10000)
|
||||
.toString(16)
|
||||
.substring(1);
|
||||
}
|
||||
return (
|
||||
s4() +
|
||||
s4() +
|
||||
"-" +
|
||||
s4() +
|
||||
"-" +
|
||||
s4() +
|
||||
"-" +
|
||||
s4() +
|
||||
"-" +
|
||||
s4() +
|
||||
s4() +
|
||||
s4()
|
||||
);
|
||||
}
|
||||
|
||||
const trampolineTokens = new Set<string>();
|
||||
|
||||
/** Checks if a given trampoline token is valid. */
|
||||
export function isValidTrampolineToken(token: string) {
|
||||
return trampolineTokens.has(token);
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows invoking a function with a short-lived trampoline token that will be
|
||||
* revoked right after the function finishes.
|
||||
*
|
||||
* @param fn Function to invoke with the trampoline token.
|
||||
*/
|
||||
export async function withTrampolineToken<T>(
|
||||
fn: (token: string) => Promise<T>
|
||||
): Promise<T> {
|
||||
let result: T;
|
||||
|
||||
// Create a unique token for this request
|
||||
const token = guid();
|
||||
trampolineTokens.add(token);
|
||||
|
||||
try {
|
||||
result = await fn(token);
|
||||
} finally {
|
||||
trampolineTokens.delete(token);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
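// --- Illustrative sketch, not part of the original commit ---
// The token is only accepted while the wrapped operation runs; once the
// function settles it is revoked, so a stale trampoline client cannot reuse it.
export async function exampleTokenLifecycle(): Promise<void> {
  const usedToken = await withTrampolineToken(async (token) => {
    console.log("valid during the operation:", isValidTrampolineToken(token)); // true
    return token;
  });
  console.log("valid after the operation:", isValidTrampolineToken(usedToken)); // false
}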
|
||||
BIN
packages/noodl-git/src/core/trampoline/trampoline.png
Normal file
BIN
packages/noodl-git/src/core/trampoline/trampoline.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 52 KiB |
BIN
packages/noodl-git/src/core/trampoline/trampoline.vsdx
Normal file
BIN
packages/noodl-git/src/core/trampoline/trampoline.vsdx
Normal file
Binary file not shown.
158
packages/noodl-git/src/core/update-index.ts
Normal file
158
packages/noodl-git/src/core/update-index.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import { git } from './client';
|
||||
import { DiffSelectionType } from './models/diff';
|
||||
import { applyPatchToIndex } from './apply';
|
||||
import { WorkingDirectoryFileChange, FileStatusKind } from './models/status';
|
||||
|
||||
interface IUpdateIndexOptions {
|
||||
/**
|
||||
* Whether or not to add a file when it exists in the working directory
|
||||
* but not in the index. Defaults to true (note that this differs from the
|
||||
* default behavior of Git which is to ignore new files).
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
add?: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not to remove a file when it exists in the index but not
|
||||
* in the working directory. Defaults to true (note that this differs from
|
||||
* the default behavior of Git which is to ignore removed files).
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
remove?: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not to forcefully remove a file from the index even though it
|
||||
* exists in the working directory. This implies remove.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
forceRemove?: boolean;
|
||||
|
||||
/**
|
||||
* Whether or not to replace conflicting entries in the index with that of
|
||||
* the working directory. Imagine the following scenario
|
||||
*
|
||||
* $ touch foo && git update-index --add foo && git commit -m 'foo'
|
||||
* $ rm foo && mkdir foo && echo "bar" > foo/bar
|
||||
* $ git update-index --add foo/bar
|
||||
* error: 'foo/bar' appears as both a file and as a directory
|
||||
* error: foo/bar: cannot add to the index - missing --add option?
|
||||
* fatal: Unable to process path foo/bar
|
||||
*
|
||||
* Replace ignores this conflict and overwrites the index with the
|
||||
* newly created directory, causing the original foo file to be deleted
|
||||
* in the index. This behavior matches what `git add` would do in a similar
|
||||
* scenario.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
replace?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the index with file contents from the working tree. This method
|
||||
* is a noop when no paths are provided.
|
||||
*
|
||||
* @param paths A list of paths which are to be updated with file contents and
|
||||
* status from the working directory.
|
||||
*
|
||||
* @param options See the IUpdateIndexOptions interface for more details.
|
||||
*/
|
||||
async function updateIndex(filePath: string, paths: ReadonlyArray<string>, options: IUpdateIndexOptions = {}) {
|
||||
if (paths.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const args = ['update-index'];
|
||||
|
||||
if (options.add !== false) {
|
||||
args.push('--add');
|
||||
}
|
||||
|
||||
if (options.remove !== false || options.forceRemove === true) {
|
||||
args.push('--remove');
|
||||
}
|
||||
|
||||
if (options.forceRemove) {
|
||||
args.push('--force-remove');
|
||||
}
|
||||
|
||||
if (options.replace !== false) {
|
||||
args.push('--replace');
|
||||
}
|
||||
|
||||
args.push('-z', '--stdin');
|
||||
|
||||
await git(args, filePath, 'updateIndex', {
|
||||
stdin: paths.join('\0')
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Stage all the given files by either staging the entire path or by applying
|
||||
* a patch.
|
||||
*
|
||||
* Note that prior to stageFiles the index has been completely reset,
|
||||
* the job of this function is to set up the index in such a way that it
|
||||
* reflects what the user has selected in the app.
|
||||
*/
|
||||
export async function stageFiles(filePath: string, files: ReadonlyArray<WorkingDirectoryFileChange>): Promise<void> {
|
||||
const normal = [];
|
||||
const oldRenamed = [];
|
||||
const partial = [];
|
||||
const deletedFiles = [];
|
||||
|
||||
for (const file of files) {
|
||||
if (file.selection.getSelectionType() === DiffSelectionType.All) {
|
||||
normal.push(file.path);
|
||||
if (file.status.kind === FileStatusKind.Renamed) {
|
||||
oldRenamed.push(file.status.oldPath);
|
||||
} else if (file.status.kind === FileStatusKind.Deleted) {
|
||||
deletedFiles.push(file.path);
|
||||
}
|
||||
} else {
|
||||
partial.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
// Staging files happens in three steps.
|
||||
//
|
||||
// In the first step we run through all of the renamed files, or
|
||||
// more specifically the source files (old) that were renamed and
|
||||
// forcefully remove them from the index. We do this in order to handle
|
||||
// the scenario where a file has been renamed and a new file has been
|
||||
// created in its original position. Think of it like this
|
||||
//
|
||||
// $ touch foo && git add foo && git commit -m 'foo'
|
||||
// $ git mv foo bar
|
||||
// $ echo "I'm a new foo" > foo
|
||||
//
|
||||
// Now we have a file which is of type Renamed that has its path set
|
||||
// to 'bar' and its oldPath set to 'foo'. But there's a new file called
|
||||
// foo in the repository. So if the user selects the 'foo -> bar' change
|
||||
// but not the new 'foo' file for inclusion in this commit we don't
|
||||
// want to add the new 'foo', we just want to recreate the move in the
|
||||
// index. We do this by forcefully removing the old path from the index
|
||||
// and then later (in step 2) stage the new file.
|
||||
await updateIndex(filePath, oldRenamed, { forceRemove: true });
|
||||
|
||||
// In the second step we update the index to match
|
||||
// the working directory in the case of new, modified, deleted,
|
||||
// and copied files as well as the destination paths for renamed
|
||||
// paths.
|
||||
await updateIndex(filePath, normal);
|
||||
|
||||
// This third step will only happen if we have files that have been marked
|
||||
// for deletion. This covers us for files that were blown away in the last
|
||||
// updateIndex call
|
||||
await updateIndex(filePath, deletedFiles, { forceRemove: true });
|
||||
|
||||
// Finally we run through all files that have partial selections.
|
||||
// We don't care about renamed or not here since applyPatchToIndex
|
||||
// has logic to support that scenario.
|
||||
for (const file of partial) {
|
||||
await applyPatchToIndex(filePath, file);
|
||||
}
|
||||
}
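// --- Illustrative sketch, not part of the original commit ---
// With the defaults above, the module-private updateIndex helper boils down to
//   git update-index --add --remove --replace -z --stdin
// with the paths fed NUL-separated over stdin, and { forceRemove: true } adds
// --force-remove on top. A minimal wrapper for the common "stage exactly these
// paths" case:
export async function exampleStagePaths(repositoryDir: string, paths: ReadonlyArray<string>): Promise<void> {
  await updateIndex(repositoryDir, paths); // --add --remove --replace by default
}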
|
||||
49
packages/noodl-git/src/core/update-ref.ts
Normal file
49
packages/noodl-git/src/core/update-ref.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { git } from "./client";
|
||||
|
||||
/**
|
||||
* Update the ref to a new value.
|
||||
*
|
||||
* @param repositoryDir - The repository in which the ref exists.
|
||||
* @param ref - The ref to update. Must be fully qualified
|
||||
* (e.g., `refs/heads/NAME`).
|
||||
* @param oldValue - The value we expect the ref to have currently. If it
|
||||
* doesn't match, the update will be aborted.
|
||||
* @param newValue - The new value for the ref.
|
||||
* @param reason - The reflog entry.
|
||||
*/
|
||||
export async function updateRef(
|
||||
repositoryDir: string,
|
||||
ref: string,
|
||||
oldValue: string,
|
||||
newValue: string,
|
||||
reason: string
|
||||
): Promise<void> {
|
||||
await git(
|
||||
["update-ref", ref, newValue, oldValue, "-m", reason],
|
||||
repositoryDir,
|
||||
"updateRef"
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a ref.
|
||||
*
|
||||
* @param repositoryDir - The repository in which the ref exists.
|
||||
* @param ref - The ref to remove. Should be fully qualified, but may also be 'HEAD'.
|
||||
* @param reason - The reflog entry (optional). Note that this is only useful when
|
||||
* deleting the HEAD reference as deleting any other reference will
|
||||
* implicitly delete the reflog file for that reference as well.
|
||||
*/
|
||||
export async function deleteRef(
|
||||
repositoryDir: string,
|
||||
ref: string,
|
||||
reason?: string
|
||||
) {
|
||||
const args = ["update-ref", "-d", ref];
|
||||
|
||||
if (reason !== undefined) {
|
||||
args.push("-m", reason);
|
||||
}
|
||||
|
||||
return await git(args, repositoryDir, "deleteRef");
|
||||
}
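// --- Illustrative sketch, not part of the original commit ---
// Deleting a branch ref, and moving a branch tip guarded by the value we expect
// it to currently have (git refuses the update if the ref has moved).
// The SHAs below are placeholders, not real commits.
export async function exampleRefOps(repositoryDir: string): Promise<void> {
  await deleteRef(repositoryDir, "refs/heads/feature/stale", "example cleanup");

  const expectedOldSha = "0000000000000000000000000000000000000000"; // placeholder
  const newSha = "1111111111111111111111111111111111111111"; // placeholder
  await updateRef(repositoryDir, "refs/heads/main", expectedOldSha, newSha, "example: move branch tip");
}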
|
||||
965
packages/noodl-git/src/git.ts
Normal file
965
packages/noodl-git/src/git.ts
Normal file
@@ -0,0 +1,965 @@
|
||||
import { removeSync } from 'fs-extra';
|
||||
import { sortBy } from 'underscore';
|
||||
|
||||
import { createErrorFromMessage, fetch, GitActionError, GitActionErrorCode } from './actions';
|
||||
import { getRemote, setRemoteURL } from './actions/remote';
|
||||
import { DEFAULT_BRANCH } from './constants';
|
||||
import { addAll } from './core/add';
|
||||
import { appendGitAttributes } from './core/attributes';
|
||||
import { currentBranchName, createBranch } from './core/branch';
|
||||
import { checkoutBranch } from './core/checkout';
|
||||
import { cleanUntrackedFiles } from './core/clean';
|
||||
import { git } from './core/client';
|
||||
import { clone } from './core/clone';
|
||||
import { createCommit } from './core/commit';
|
||||
import { getConfigValue, setConfigValue } from './core/config';
|
||||
import { getBranchesOld } from './core/for-each-ref';
|
||||
import { appendGitIgnore } from './core/ignore';
|
||||
import { init, installMergeDriver } from './core/init';
|
||||
import { getChangedFiles, getCommits } from './core/logs';
|
||||
import { merge, getMergeBase, mergeTree, mergeTreeCommit } from './core/merge';
|
||||
import { BranchType } from './core/models/branch';
|
||||
import { ComputedAction } from './core/models/computed-action';
|
||||
import { IFetchProgress } from './core/models/progress';
|
||||
import { Commit } from './core/models/snapshot';
|
||||
import { open } from './core/open';
|
||||
import { push, pushDelete } from './core/push';
|
||||
import { refhead } from './core/refs';
|
||||
import { getRemotes, addRemote, getRemoteURL } from './core/remotes';
|
||||
import { GitResetMode, reset } from './core/reset';
|
||||
import { getAheadBehind, revRange, revSymmetricDifference } from './core/rev-list';
|
||||
import { getAllTags } from './core/show-ref';
|
||||
import { popStashEntry, createStashEntry, getStashes, popStashEntryToBranch } from './core/stash';
|
||||
import { getStatus } from './core/status';
|
||||
import { deleteRef } from './core/update-ref';
|
||||
import { cleanMergeDriverOptionsSync, writeMergeDriverOptions } from './merge-driver';
|
||||
import { MergeStrategy, MergeStrategyFunc } from './merge-strategy';
|
||||
import {
|
||||
GitStatus,
|
||||
GitCommit,
|
||||
GitBranch,
|
||||
GitCloneOptions,
|
||||
ConvertStatusKindToGitStatus,
|
||||
GitEmptyRepositoryError
|
||||
} from './models';
|
||||
|
||||
export enum CommitState {
|
||||
NONE = 0,
|
||||
LOCAL = 1 << 0,
|
||||
REMOTE = 1 << 1
|
||||
}
|
||||
|
||||
export interface FetchOptions {
|
||||
onProgress?: (progress: IFetchProgress) => void;
|
||||
}
|
||||
|
||||
export interface PullOptions {
|
||||
onProgress?: (progress: IFetchProgress) => void;
|
||||
}
|
||||
|
||||
export function createSquashMessage(branchName: string) {
|
||||
return `Squashed commit from branch '${branchName}'`;
|
||||
}
|
||||
|
||||
export function createMergeMessage(theirsBranchName: string, oursBranchName: string): string {
|
||||
return `Merge ${theirsBranchName} into ${oursBranchName}`;
|
||||
}
|
||||
|
||||
export type GitProvider = 'noodl' | 'github' | 'unknown' | 'none';
|
||||
|
||||
/**
|
||||
* NOTE: Don't change the methods here, they are the same as in Noodl Editor right now!
|
||||
* NOTE: I have added some return values in some places where there were none before.
|
||||
*/
|
||||
export class Git {
|
||||
public get repositoryPath() {
|
||||
return this.baseDir;
|
||||
}
|
||||
|
||||
private baseDir: string | null = null;
|
||||
private originProvider: GitProvider | null = null;
|
||||
private originUrl: string | null = null;
|
||||
|
||||
get Provider() {
|
||||
return this.originProvider;
|
||||
}
|
||||
|
||||
get OriginUrl() {
|
||||
return this.originUrl;
|
||||
}
|
||||
|
||||
constructor(private readonly mergeProject: MergeStrategyFunc) {}
|
||||
|
||||
/**
|
||||
* Initialize a new git repository in the given path.
|
||||
*
|
||||
* @param baseDir
|
||||
*/
|
||||
async initNewRepo(baseDir: string, options?: { bare: boolean }): Promise<void> {
|
||||
if (this.baseDir) return;
|
||||
|
||||
this.baseDir = await init(baseDir, options);
|
||||
await this._setupRepository();
|
||||
}
|
||||
|
||||
/**
|
||||
* Open a git repository in the given path.
|
||||
*
|
||||
* @param baseDir
|
||||
*/
|
||||
async openRepository(baseDir: string): Promise<void> {
|
||||
if (this.baseDir) return;
|
||||
|
||||
this.baseDir = await open(baseDir);
|
||||
await this._setupRepository();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone a new repository.
|
||||
*
|
||||
* @param options
|
||||
*/
|
||||
async clone({ url, directory, singleBranch, onProgress }: GitCloneOptions): Promise<void> {
|
||||
if (this.baseDir) return;
|
||||
|
||||
async function _clone() {
|
||||
try {
|
||||
return await clone(url, directory, { branch: 'main', singleBranch }, (progress) => {
|
||||
onProgress && onProgress(progress);
|
||||
});
|
||||
} catch (exc) {
|
||||
try {
|
||||
return await clone(url, directory, { branch: 'master', singleBranch }, (progress) => {
|
||||
onProgress && onProgress(progress);
|
||||
});
|
||||
} catch (exc) {
|
||||
throw 'warning: You appear to have cloned an empty repository.';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await _clone();
|
||||
|
||||
this.baseDir = directory;
|
||||
await this._setupRepository();
|
||||
} catch (exc) {
|
||||
if (exc && exc.toString().includes('warning: You appear to have cloned an empty repository.')) {
|
||||
// Remove the directory when it fails
|
||||
removeSync(directory);
|
||||
throw new GitEmptyRepositoryError();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List the remotes, sorted alphabetically by `name`, for a repository.
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
async getRemoteName(): Promise<string> {
|
||||
const remotes = await getRemotes(this.baseDir);
|
||||
return remotes.length ? remotes[0].name : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new remote with the given URL.
|
||||
*
|
||||
* @param remoteURL
|
||||
*/
|
||||
async addRemote(remoteURL: string): Promise<void> {
|
||||
await addRemote(this.baseDir, 'origin', remoteURL);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param message
|
||||
* @returns Commit SHA
|
||||
*/
|
||||
async commit(message: string): Promise<string> {
|
||||
// Cannot commit when there are no changes
|
||||
const status = await this.status();
|
||||
if (status.length === 0) throw new Error('Cannot commit without any local changes.');
|
||||
|
||||
// Stage all changes, and untracked files
|
||||
// NOTE: This can be done with the commit command
|
||||
await addAll(this.baseDir);
|
||||
|
||||
// Commit the changes
|
||||
return await createCommit(this.baseDir, message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the status of the current git state.
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
async status(): Promise<GitStatus[]> {
|
||||
const statusList = await getStatus(this.baseDir);
|
||||
return statusList.files.map((x) => ({
|
||||
status: ConvertStatusKindToGitStatus(x.status.kind),
|
||||
path: x.path
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Stash the local changes.
|
||||
*
|
||||
* NOTE: This doesn't check if there are any changes.
|
||||
*/
|
||||
async stashPushChanges(branchName?: string) {
|
||||
const useBranchName = branchName ?? (await currentBranchName(this.baseDir));
|
||||
const stashMarker = `!!Noodl<${useBranchName}>`;
|
||||
return await createStashEntry(this.baseDir, stashMarker);
|
||||
}
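// --- Illustrative note, not part of the original commit ---
// The `!!Noodl<branchName>` marker pairs a stash with the branch it was taken
// from, so stashPopChanges() below only restores a stash created by
// stashPushChanges() for that same branch, e.g.:
//   await repo.stashPushChanges();                 // message: !!Noodl<feature/foo>
//   // ...checkout another branch, merge, come back...
//   const restored = await repo.stashPopChanges(); // false when no marked stash exists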
|
||||
|
||||
/**
|
||||
* Pop the stash.
|
||||
*/
|
||||
async stashPopChanges(branchName?: string): Promise<boolean> {
|
||||
const useBranchName = branchName ?? (await currentBranchName(this.baseDir));
|
||||
const stashMarker = `!!Noodl<${useBranchName}>`;
|
||||
|
||||
const stashes = await getStashes(this.baseDir);
|
||||
const stash = stashes.entries.find((x) => x.message === stashMarker);
|
||||
if (!stash) {
|
||||
return false;
|
||||
}
|
||||
|
||||
await writeMergeDriverOptions({
|
||||
reversed: true
|
||||
});
|
||||
|
||||
try {
|
||||
// Pop and then we do the merge check
|
||||
await popStashEntry(this.baseDir, stash.name);
|
||||
} catch (err) {
|
||||
// Run merge on untracked files, we are not able to pop the stash when there are untracked files.
|
||||
// The solution to this is to pop the stash to a new branch, which is done via "git stash branch".
|
||||
//
|
||||
// This means that we can handle it as 2 different branches making it easier to merge.
|
||||
//
|
||||
// OKAY, GOOD LUCK! BE READY!
|
||||
//
|
||||
// So here is a story all about how I screwed up git for a very long time.
|
||||
// Let's start with a stash and then pull in some changes.
// Then we pop our stash, but it's all about untracked changes.
// So we create a branch called !!Noodl-Stash-... which has all our (local) changes
// Then we merge the remote changes into our new branch without any problems
// Then we check out the branch we are working on
// And squash merge all the files from our new !!Noodl-Stash branch without creating a commit
|
||||
//
|
||||
// Yeah it doesn't really work in the end, but maybe it helps a little? Maybe makes you happy?
|
||||
//
|
||||
if (err.toString().includes('could not restore untracked files from stash')) {
|
||||
const previousBranch = await this.getCurrentBranchName();
|
||||
const stashBranchName = `!!Noodl-Stash-${stash.branchName}`;
|
||||
|
||||
// Create a new branch from the stash
|
||||
// this will also checkout the branch
|
||||
await popStashEntryToBranch(this.baseDir, stash.name, stashBranchName);
|
||||
|
||||
// Merge our working branch into the stash branch
|
||||
await this._merge({
|
||||
theirsBranchName: previousBranch,
|
||||
oursBranchName: stashBranchName,
|
||||
isSquash: false,
|
||||
message: undefined,
|
||||
allowFastForward: false
|
||||
});
|
||||
|
||||
const changes = await this.status();
|
||||
if (changes.length > 0) {
|
||||
await this.commit('Merge stash');
|
||||
}
|
||||
|
||||
// TODO: Should we make sure there are no issues?
|
||||
|
||||
// Checkout the working branch
|
||||
await this.checkoutBranch(previousBranch);
|
||||
|
||||
// Squash merge our stash branch into the working branch without making any commits.
|
||||
await merge(this.baseDir, stashBranchName, {
|
||||
strategy: 'recursive',
|
||||
strategyOption: 'theirs',
|
||||
isSquash: true,
|
||||
squashNoCommit: true,
|
||||
message: undefined,
|
||||
noFastForward: true
|
||||
});
|
||||
|
||||
// Delete the stash branch
|
||||
await this.deleteBranch(stashBranchName);
|
||||
|
||||
// And what should be left on the working branch is our stash, that we love so much!
|
||||
} else {
|
||||
// We failed to pop the stash, this shouldn't happen, but we just log the error and return false.
|
||||
console.error(err);
|
||||
return false;
|
||||
}
|
||||
} finally {
|
||||
cleanMergeDriverOptionsSync();
|
||||
}
|
||||
|
||||
// Get merge information
|
||||
const headCommitish = await this.getHeadCommitId();
|
||||
const tree = await mergeTreeCommit(this.baseDir, stash.sha, headCommitish);
|
||||
|
||||
// Check if there is a merge conflict
|
||||
if (tree.kind === ComputedAction.Conflicts) {
|
||||
// Solve any conflicts if there are any after reapplying the stash
|
||||
// NOTE(?): "ours" and "theirs" are reversed, since our changes are the incoming ones from the stash
|
||||
const solver = new MergeStrategy(this.baseDir, this.mergeProject);
|
||||
await solver.solveConflicts(tree);
|
||||
} else if (tree.kind !== ComputedAction.Clean) {
|
||||
throw new Error('Failed to merge stash, ' + tree.kind);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param options
|
||||
*/
|
||||
async mergeToCurrentBranch(theirsBranchName: string, squash = true, message: string = undefined): Promise<void> {
|
||||
return this._mergeToCurrentBranch({
|
||||
theirsBranchName,
|
||||
squash,
|
||||
message
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the current branch to HEAD.
|
||||
*
|
||||
* This will delete all local changes and
|
||||
* checkout the HEAD branch if it exists.
|
||||
*/
|
||||
async resetToHead(): Promise<void> {
|
||||
// Get the current branch
|
||||
const branchName = await currentBranchName(this.baseDir);
|
||||
const branches = await getBranchesOld(this.baseDir, branchName);
|
||||
const branch = branches.find((x) => x.name === branchName);
|
||||
|
||||
// Delete all the untracked files
|
||||
await cleanUntrackedFiles(this.baseDir);
|
||||
|
||||
// Delete all local changes
|
||||
await reset(this.baseDir, GitResetMode.Hard, 'HEAD');
|
||||
|
||||
// NOTE: Only checkout the branch if it exists
|
||||
if (branch) {
|
||||
await checkoutBranch(this.baseDir, branch, { force: true });
|
||||
}
|
||||
}
|
||||
|
||||
async resetToCommitWithId(id: string): Promise<void> {
|
||||
await reset(this.baseDir, GitResetMode.Hard, id);
|
||||
await cleanUntrackedFiles(this.baseDir);
|
||||
}
|
||||
|
||||
async resetToMergeBase(): Promise<void> {
|
||||
const headCommitId = await this.getHeadCommitId();
|
||||
const remoteHeadCommitId = await this.getRemoteHeadCommitId();
|
||||
if (!remoteHeadCommitId) {
|
||||
throw new Error('No remote commit to base from');
|
||||
}
|
||||
|
||||
const mergeBaseId = await this.getMergeBaseCommitId(headCommitId, remoteHeadCommitId);
|
||||
await this.resetToCommitWithId(mergeBaseId);
|
||||
await cleanUntrackedFiles(this.baseDir);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @deprecated This is only used in old git panel
|
||||
*
|
||||
* @param options
|
||||
* @returns
|
||||
*/
|
||||
async push(options?: { onProgress?: (args: { percent: number }) => void }): Promise<boolean> {
|
||||
const currentBranchName = await this.getCurrentBranchName();
|
||||
const remoteBranchName = currentBranchName;
|
||||
|
||||
const remote = await getRemote(this.baseDir);
|
||||
|
||||
try {
|
||||
return await push(this.baseDir, remote, currentBranchName, remoteBranchName, [], undefined, (progress) => {
|
||||
if (options?.onProgress) {
|
||||
options.onProgress({
|
||||
percent: progress.value
|
||||
});
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
const message = error.toString();
|
||||
if (message.includes('Updates were rejected because the remote contains work that you do')) {
|
||||
throw new Error(
|
||||
'Updates were rejected because there are new changes that you do not have locally. Pull to get the latest changes.'
|
||||
);
|
||||
}
|
||||
throw createErrorFromMessage(message);
|
||||
}
|
||||
}
|
||||
|
||||
async fetch({ onProgress }: FetchOptions): Promise<void> {
|
||||
// Check remote origin where it is hosted
|
||||
const remoteName = await this.getRemoteName();
|
||||
const remoteUrl = await getRemoteURL(this.repositoryPath, remoteName);
|
||||
|
||||
this.originUrl = remoteUrl?.trim();
|
||||
this.originProvider = this.getProviderForRemote(remoteUrl);
|
||||
|
||||
if (!remoteUrl) {
|
||||
throw new GitActionError(GitActionErrorCode.LocalRepository);
|
||||
}
|
||||
|
||||
// Fetch
|
||||
const remote = await getRemote(this.baseDir);
|
||||
await fetch(this.baseDir, remote, onProgress);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
async hasRemoteCommits(): Promise<boolean> {
|
||||
const remoteName = await this.getRemoteName();
|
||||
const tags = await getAllTags(this.baseDir);
|
||||
|
||||
const refDefault = `refs/remotes/${remoteName}/${DEFAULT_BRANCH}`;
|
||||
|
||||
// Backwards compatibility
|
||||
const ref_master = `refs/remotes/${remoteName}/master`;
|
||||
|
||||
return tags.has(refDefault) || tags.has(ref_master);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of all commits.
|
||||
*/
|
||||
async getCommitsCurrentBranch(): Promise<GitCommit[]> {
|
||||
const limit = 100;
|
||||
|
||||
const localGitCommits = await this.getCommits(undefined, limit);
|
||||
let commits: GitCommit[] = localGitCommits.map(this.mapCommit);
|
||||
|
||||
const remoteHeadId = await this.getRemoteHeadCommitId();
|
||||
if (remoteHeadId) {
|
||||
const currentBranchName = await this.getCurrentBranchName();
|
||||
const remoteGitCommits = await getCommits(this.baseDir, `origin/${currentBranchName}`, limit, undefined);
|
||||
|
||||
if (commits.length > 0 && remoteGitCommits.length > 0) {
|
||||
const aheadBehind = await this.aheadBehind(commits[0].sha, remoteHeadId);
|
||||
//note: aheadBehind.ahead can be more than commits.length, primarily after merging a branch with lots of commits
|
||||
for (let i = 0; i < Math.min(commits.length, aheadBehind.ahead); i++) {
|
||||
commits[i].isLocalAhead = true;
|
||||
}
|
||||
}
|
||||
|
||||
const remoteOnlyCommits = remoteGitCommits
|
||||
.filter((x) => localGitCommits.findIndex((c) => c.sha === x.sha) === -1)
|
||||
.map((x) => ({ ...this.mapCommit(x), isRemoteAhead: true }));
|
||||
|
||||
commits = remoteOnlyCommits.concat(commits);
|
||||
} else {
|
||||
// there is no remote, it's a local branch
|
||||
// flag all the commits as being "ahead" of
|
||||
// the remote (even the commit that was the branching point, and commits before it)
|
||||
commits.forEach((c) => (c.isLocalAhead = true));
|
||||
}
|
||||
|
||||
return commits;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the commits between the two commits excluding the base commit.
|
||||
*
|
||||
* NOTE: It's very important that the commit SHAs are in the right order.
|
||||
*
|
||||
* @param baseCommitId
|
||||
* @param targetCommitId
|
||||
* @returns
|
||||
*/
|
||||
async getCommitsBetween(baseCommitId: string, targetCommitId: string): Promise<GitCommit[]> {
|
||||
return (await this.getCommits(revSymmetricDifference(baseCommitId, targetCommitId), 100)).map(this.mapCommit);
|
||||
}
|
||||
|
||||
async getCommitFromId(id: string): Promise<Commit | null> {
|
||||
const commits = await this.getCommits(id, 1);
|
||||
const commit = commits.length > 0 ? commits[0] : null;
if (!commit) {
return null;
}
return new Commit(
|
||||
this.baseDir,
|
||||
commit.sha,
|
||||
commit.shortSha,
|
||||
commit.summary,
|
||||
commit.body,
|
||||
commit.author,
|
||||
commit.committer,
|
||||
commit.parentSHAs,
|
||||
commit.tags
|
||||
);
|
||||
}
|
||||
|
||||
async getMergeBaseCommitId(id1: string, id2: string): Promise<string> {
|
||||
return await getMergeBase(this.baseDir, id1, id2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the HEAD SHA of the current branch.
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
async getHeadCommitId(): Promise<string | null> {
|
||||
const commit = await refhead(this.baseDir);
|
||||
return commit ? commit : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the remote latest SHA of the current branch.
|
||||
*
|
||||
* TODO: This is not a HEAD
|
||||
*/
|
||||
async getRemoteHeadCommitId(): Promise<string | null> {
|
||||
const remoteName = await this.getRemoteName();
|
||||
const branchName = await this.getCurrentBranchName();
|
||||
const tags = await getAllTags(this.baseDir);
|
||||
const ref = `refs/remotes/${remoteName}/${branchName}`;
|
||||
|
||||
return tags.has(ref) ? tags.get(ref) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param branchName
|
||||
*/
|
||||
async getHeadCommitOnBranch(branchName: string): Promise<string | null> {
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
const branch = branches.find((x) => x.name === branchName);
|
||||
return branch?.tip?.sha;
|
||||
}
|
||||
|
||||
async currentAheadBehind(): Promise<{ ahead: number; behind: number }> {
|
||||
const currentLocalCommitId = await this.getHeadCommitId();
|
||||
const currentRemoteCommitId = await this.getRemoteHeadCommitId();
|
||||
return await this.aheadBehind(currentLocalCommitId, currentRemoteCommitId);
|
||||
}
|
||||
|
||||
async aheadBehind(localCommitId: string, upstreamCommitId: string): Promise<{ ahead: number; behind: number }> {
|
||||
if (upstreamCommitId) {
|
||||
const { ahead, behind } = await getAheadBehind(
|
||||
this.baseDir,
|
||||
revSymmetricDifference(localCommitId, upstreamCommitId)
|
||||
);
|
||||
return { ahead, behind };
|
||||
} else {
|
||||
const { ahead, behind } = await getAheadBehind(this.baseDir, localCommitId);
|
||||
return { ahead, behind };
|
||||
}
|
||||
}
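// --- Illustrative note, not part of the original commit ---
// aheadBehind() counts commits on each side of the symmetric difference
// `local...upstream`: { ahead: 2, behind: 1 } means the local branch has two
// commits the upstream lacks and the upstream has one commit the local branch
// lacks. With no upstream commit the count covers the local commits only.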
|
||||
|
||||
async getFileDiff(baseCommitId: string, targetCommitId: string): Promise<GitStatus[]> {
|
||||
const changes = await getChangedFiles(this.baseDir, revRange(baseCommitId, targetCommitId));
|
||||
return changes.files.map((x) => ({
|
||||
status: ConvertStatusKindToGitStatus(x.status.kind),
|
||||
path: x.path
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new branch from the HEAD branch.
|
||||
*
|
||||
* @param name
|
||||
*/
|
||||
async createBranchFromHead(name: string): Promise<void> {
|
||||
const headCommitId = await this.getHeadCommitId();
|
||||
await createBranch(this.baseDir, name, headCommitId);
|
||||
}
|
||||
|
||||
/**
|
||||
* This will:
|
||||
* 1. Create a new branch
|
||||
* 2. Stash the current changes
|
||||
* 3. Checkout the new branch
|
||||
* 4. Pop the stash changes
|
||||
*
|
||||
* @param branchName
|
||||
*/
|
||||
async createAndCheckoutBranch(branchName: string): Promise<void> {
|
||||
const currentBranchName = await this.getCurrentBranchName();
|
||||
|
||||
// Check if branch already exists
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
if (branches.find((x) => x.name === branchName)) throw new Error('Branch already exists');
|
||||
|
||||
// The branch name can be invalid
|
||||
try {
|
||||
await this.createBranchFromHead(branchName);
|
||||
} catch (err) {
|
||||
const message = err.toString();
|
||||
if (message.includes('fatal: cannot lock ref') || message.includes('is not a valid branch name')) {
|
||||
throw new GitActionError(GitActionErrorCode.InvalidBranchName);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
|
||||
// Stash changes if there are any local changes
|
||||
const status = await this.status();
|
||||
const needsStash = status.length > 0;
|
||||
|
||||
if (needsStash) {
|
||||
await this.stashPushChanges(currentBranchName);
|
||||
}
|
||||
|
||||
try {
|
||||
await this.checkoutBranch(branchName);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
} finally {
|
||||
if (needsStash) {
|
||||
await this.stashPopChanges(currentBranchName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param name
|
||||
*/
|
||||
async checkoutRemoteBranch(name: string): Promise<void> {
|
||||
const remoteName = await this.getRemoteName();
|
||||
const refName = `refs/remotes/${remoteName}/${name}`;
|
||||
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
const remoteRef = branches.find((x) => x.ref === refName);
if (!remoteRef) throw new Error('Remote branch not found');

await this.checkoutBranch(remoteRef.name);
|
||||
}
|
||||
|
||||
/**
|
||||
* This will:
|
||||
* 1. Stash changes
|
||||
* 2. Checkout the branch
|
||||
* 3. Pop the stash changes (from the new branch; if there are any)
|
||||
*
|
||||
* @param name
|
||||
*/
|
||||
async checkoutBranch(name: string, commitish?: string): Promise<void> {
|
||||
// Find the branch we want to checkout
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
const branch = branches.find((x) => x.name === name);
|
||||
if (!branch) throw new Error('Branch not found');
|
||||
|
||||
// 1. Stash changes if there are any local changes
|
||||
const status = await this.status();
|
||||
if (status.length > 0) {
|
||||
await this.stashPushChanges();
|
||||
}
|
||||
|
||||
// 2. Checkout the branch
|
||||
await checkoutBranch(this.baseDir, branch, {
|
||||
force: false,
|
||||
commitish
|
||||
});
|
||||
|
||||
// 3.
|
||||
try {
|
||||
await this.stashPopChanges();
|
||||
} catch {
|
||||
// if there is no stash then we don't care.
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current branch name.
|
||||
*/
|
||||
async getCurrentBranchName(): Promise<string> {
|
||||
const branchName = await currentBranchName(this.baseDir);
|
||||
return branchName.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all the branches.
|
||||
*
|
||||
* Technical: This merges all the branches (local/remote) into one list.
|
||||
*
|
||||
* @deprecated This is only used in old git panel
|
||||
*/
|
||||
async getBranches(): Promise<GitBranch[]> {
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
|
||||
const allBranches = branches
|
||||
.filter((x) => x.type === BranchType.Local)
|
||||
.map((x) => {
|
||||
return {
|
||||
name: x.name,
|
||||
local: true,
|
||||
remote: false
|
||||
};
|
||||
});
|
||||
|
||||
// Update all the branches with the remote branches
|
||||
// There can be cases where you have 2 branches with the same name, just one is on a different remote.
|
||||
// Could also be cases where there are even more than just 2... Take care!
|
||||
branches
|
||||
.filter((x) => x.type === BranchType.Remote)
|
||||
.forEach((x) => {
|
||||
// For example: "origin/A"
|
||||
// In the UI we just want to display the branch name, not the remote.
|
||||
let split = x.name.split('/');
|
||||
if (split.length > 1) {
|
||||
split = split.slice(1);
|
||||
}
|
||||
const branchName = split.join('/');
|
||||
|
||||
const hitIndex = allBranches.findIndex((x) => x.name === branchName);
|
||||
if (hitIndex !== -1) {
|
||||
allBranches[hitIndex].remote = true;
|
||||
} else {
|
||||
allBranches.push({
|
||||
name: branchName,
|
||||
local: false,
|
||||
remote: true
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return sortBy(allBranches, (x) => x.name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a specific branch.
|
||||
*
|
||||
* @param name
|
||||
*/
|
||||
async deleteBranch(name: string): Promise<void> {
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
|
||||
const branch = branches.find((x) => x.name === name && x.type === BranchType.Local);
|
||||
if (!branch) throw new Error(`Branch doesn't exist`);
|
||||
|
||||
await deleteRef(this.baseDir, branch.ref);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param name
|
||||
*/
|
||||
async deleteRemoteBranch(name: string): Promise<void> {
|
||||
const remote = await getRemote(this.baseDir);
|
||||
const refName = `refs/remotes/${remote.name}/${name}`;
|
||||
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
const branch = branches.find((x) => x.ref === refName && x.type === BranchType.Remote);
|
||||
|
||||
if (!branch) throw new Error('Remote Branch not found');
|
||||
|
||||
await pushDelete(this.baseDir, remote.name, name);
|
||||
}
|
||||
|
||||
private mapCommit(x: Commit): GitCommit {
|
||||
return {
|
||||
sha: x.sha,
|
||||
shortSha: x.shortSha,
|
||||
message: x.summary,
|
||||
date: x.author.date,
|
||||
parentCount: x.parentSHAs.length,
|
||||
author: {
|
||||
name: x.author.name,
|
||||
email: x.author.email
|
||||
},
|
||||
isLocalAhead: false,
|
||||
isRemoteAhead: false
|
||||
};
|
||||
}
|
||||
|
||||
private async getCommits(revisionRange: string | null, limit: number): Promise<ReadonlyArray<Commit>> {
|
||||
return await getCommits(this.baseDir, revisionRange, limit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup the common git rules for it to work with Noodl.
|
||||
*/
|
||||
private async _setupRepository() {
|
||||
await setConfigValue(this.baseDir, 'core.precomposeUnicode', 'true');
|
||||
await setConfigValue(this.baseDir, 'core.protectNTFS', 'false');
|
||||
await setConfigValue(this.baseDir, 'core.longpaths', 'true');
|
||||
|
||||
await installMergeDriver(this.baseDir);
|
||||
|
||||
// Ignore these files (these rules are just saved in memory, not to .gitignore)
|
||||
await appendGitIgnore(this.baseDir, ['project-tmp.json*', '.DS_Store', '__MACOSX']);
|
||||
|
||||
// Create or append the .gitattributes file
|
||||
await appendGitAttributes(this.baseDir, ['project.json merge=noodl']);
|
||||
|
||||
const remoteName = await this.getRemoteName();
|
||||
if (remoteName) {
|
||||
const remoteUrl = await getRemoteURL(this.repositoryPath, remoteName);
|
||||
this.originUrl = remoteUrl?.trim();
|
||||
}
|
||||
|
||||
this.originProvider = this.getProviderForRemote(this.originUrl);
|
||||
}
|
||||
|
||||
private async _mergeToCurrentBranch(options: {
|
||||
theirsBranchName: string;
|
||||
squash?: boolean;
|
||||
message?: string;
|
||||
allowFastForward?: boolean;
|
||||
}): Promise<void> {
|
||||
// Get the target merge branch name
|
||||
const oursBranchName = await currentBranchName(this.baseDir);
|
||||
|
||||
// Stash changes if there are any local changes
|
||||
const status = await this.status();
|
||||
const needsStash = status.length > 0;
|
||||
|
||||
if (needsStash) {
|
||||
await this.stashPushChanges(oursBranchName);
|
||||
}
|
||||
|
||||
// Do the merge
|
||||
try {
|
||||
await this._merge({
|
||||
theirsBranchName: options.theirsBranchName,
|
||||
oursBranchName,
|
||||
isSquash: options.squash,
|
||||
message: options.message,
|
||||
allowFastForward: options.allowFastForward ?? true
|
||||
});
|
||||
} finally {
|
||||
// Reapply local changes
|
||||
if (needsStash) {
|
||||
await this.stashPopChanges(oursBranchName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async _merge(options: {
|
||||
message: string;
|
||||
/**
|
||||
* Previously this was called: fromBranchName
|
||||
*/
|
||||
theirsBranchName: string;
|
||||
/**
|
||||
* Previously this was called: targetBranchName
|
||||
*/
|
||||
oursBranchName: string;
|
||||
isSquash: boolean;
|
||||
squashNoCommit?: boolean;
|
||||
allowFastForward: boolean;
|
||||
}): Promise<void> {
|
||||
const branches = await getBranchesOld(this.baseDir);
|
||||
const ours = branches.find((x) => x.name === options.oursBranchName);
|
||||
const theirs = branches.find((x) => x.name === options.theirsBranchName);
|
||||
|
||||
if (!ours) throw new Error(`Branch '${options.oursBranchName}' not found.`);
|
||||
if (!theirs) throw new Error(`Branch '${options.theirsBranchName}' not found.`);
|
||||
|
||||
const message =
|
||||
options.message ||
|
||||
(options.isSquash
|
||||
? createSquashMessage(options.theirsBranchName)
|
||||
: createMergeMessage(options.theirsBranchName, options.oursBranchName));
|
||||
|
||||
try {
|
||||
// NOTE: We got a merge conflict, somewhere, somehow.
|
||||
// Issue: project.json file gets added to Unmerged paths.
|
||||
// Solution: "git add/rm <file>" aka stage files
|
||||
await addAll(this.baseDir);
|
||||
|
||||
await merge(this.baseDir, options.theirsBranchName, {
|
||||
strategy: undefined,
|
||||
isSquash: options.isSquash,
|
||||
squashNoCommit: !!options.squashNoCommit,
|
||||
message,
|
||||
noFastForward: !options.allowFastForward
|
||||
});
|
||||
} catch (err) {
|
||||
console.info('there are merge conflicts that have to be resolved, so we will resolve them just after this.');
|
||||
console.info(err);
|
||||
}
|
||||
|
||||
// Our custom strategy
|
||||
const tree = await mergeTree(this.baseDir, ours, theirs);
|
||||
if (tree.kind === ComputedAction.Conflicts) {
|
||||
// Run our solve strategy
|
||||
const solver = new MergeStrategy(this.baseDir, this.mergeProject);
|
||||
await solver.solveConflicts(tree);
|
||||
|
||||
// Create a merge commit
|
||||
await createCommit(this.baseDir, message);
|
||||
} else if (tree.kind !== ComputedAction.Clean) {
|
||||
throw new Error('Failed to merge, ' + tree.kind);
|
||||
}
|
||||
}
|
||||
|
||||
public async getConfigValue(name: string) {
|
||||
return getConfigValue(this.baseDir, name);
|
||||
}
|
||||
|
||||
public async setConfigValue(name: string, value: string) {
|
||||
return setConfigValue(this.baseDir, name, value);
|
||||
}
|
||||
|
||||
public getProviderForRemote(remoteUrl: string): GitProvider {
|
||||
if (!remoteUrl) {
|
||||
return 'none';
|
||||
} else if (remoteUrl.includes('noodlapp.com')) {
|
||||
return 'noodl';
|
||||
} else if (remoteUrl.includes('github.com')) {
|
||||
return 'github';
|
||||
} else {
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
|
||||
public async setRemoteURL(remoteUrl: string) {
|
||||
const remoteName = await this.getRemoteName();
|
||||
|
||||
const url = remoteUrl?.trim();
|
||||
|
||||
//if there's no existing remote, add one called origin
|
||||
if (!remoteName) {
|
||||
await addRemote(this.repositoryPath, 'origin', url);
|
||||
return;
|
||||
}
|
||||
|
||||
this.originUrl = url;
|
||||
this.originProvider = this.getProviderForRemote(url);
|
||||
await setRemoteURL(this.repositoryPath, remoteName, url);
|
||||
}
|
||||
|
||||
public async tryHandleRebaseState() {
|
||||
const status = await this.status();
|
||||
const conflicts = status.filter((s) => s.status === 'conflicted').map((s) => s.path);
|
||||
|
||||
//check if the only issue is .gitignore and handle it
|
||||
if (conflicts.length === 1 && conflicts.includes('.gitignore')) {
|
||||
//resolve it with ours, just overwrite the remote
|
||||
//note that "ours" is reversed here since we're rebasing
|
||||
await git(['checkout', '--theirs', '.gitignore'], this.repositoryPath, 'pick-our-gitignore');
|
||||
await git(['add', '.gitignore'], this.repositoryPath, 'add-gitignore');
|
||||
await git(['rebase', '--continue'], this.repositoryPath, 'continue-rebase', {
|
||||
env: {
|
||||
GIT_EDITOR: 'true' //make sure the commit message editor doesn't pop up
|
||||
}
|
||||
});
|
||||
} else {
|
||||
throw new Error('Repository is in a rebase state. Please handle manually through a git client.');
|
||||
}
|
||||
}
|
||||
|
||||
public async isRebaseInProgress(): Promise<boolean> {
|
||||
try {
|
||||
//this will return the SHA of the commit that is being rebased, or throw if there's no rebase in progress
|
||||
await git(['rev-parse', '--verify', 'REBASE_HEAD'], this.repositoryPath, 'check-rebase');
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
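// --- Illustrative usage sketch, not part of the original commit ---
// Putting the facade together: open a repository, commit local changes, then
// squash-merge a feature branch back into the working branch. The merge
// callback and the branch names are placeholders; the editor supplies its own
// project.json merge function.
export async function exampleGitWorkflow(projectDir: string): Promise<void> {
  const repo = new Git((ancestor, ours, theirs) => ({ ...ancestor, ...theirs, ...ours })); // placeholder merge
  await repo.openRepository(projectDir);

  if ((await repo.status()).length > 0) {
    await repo.commit('Save local changes');
  }

  await repo.createAndCheckoutBranch('feature/example');
  // ...edit files and commit on the feature branch...
  await repo.checkoutBranch('main');
  await repo.mergeToCurrentBranch('feature/example', true); // squash merge
}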
|
||||
27
packages/noodl-git/src/history.ts
Normal file
27
packages/noodl-git/src/history.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { getCommits } from './core/logs';
|
||||
import { Branch } from './core/models/branch';
|
||||
import { revRange } from './core/rev-list';
|
||||
|
||||
export function getLocalCommits(
|
||||
repositoryDir: string,
|
||||
branch: Branch | null,
|
||||
revisionRange: string | null,
|
||||
limit: number
|
||||
) {
|
||||
const args = ['--not', '--remotes'];
|
||||
const range = revisionRange ?? (branch?.upstream ? revRange(branch.upstream, branch.name) : 'HEAD');
|
||||
|
||||
return getCommits(repositoryDir, range, limit, undefined, args);
|
||||
}
|
||||
|
||||
export function getRemoteCommits(
|
||||
repositoryDir: string,
|
||||
branch: Branch | null,
|
||||
revisionRange: string | null,
|
||||
limit: number
|
||||
) {
|
||||
const args = ['--remotes'];
|
||||
const range = revisionRange ?? (branch?.upstream ? revRange(branch.upstream, branch.name) : 'HEAD');
|
||||
|
||||
return getCommits(repositoryDir, range, limit, undefined, args);
|
||||
}
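// --- Illustrative sketch, not part of the original commit ---
// getLocalCommits excludes everything reachable from a remote (`--not --remotes`),
// while getRemoteCommits walks remote refs only (`--remotes`). With an upstream
// set, both walk the `upstream..branch` range; without one they fall back to HEAD.
export async function exampleUnpushedCount(repositoryDir: string, branch: Branch): Promise<number> {
  const unpushed = await getLocalCommits(repositoryDir, branch, null, 100);
  return unpushed.length;
}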
|
||||
5
packages/noodl-git/src/index.ts
Normal file
5
packages/noodl-git/src/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export * from "./models";
|
||||
export * from "./git";
|
||||
export * from "./actions";
|
||||
|
||||
export { setRequestGitAccount } from "./core/trampoline/trampoline-askpass-handler";
|
||||
53
packages/noodl-git/src/merge-driver.js
Normal file
53
packages/noodl-git/src/merge-driver.js
Normal file
@@ -0,0 +1,53 @@
|
||||
const fs = require('fs');
|
||||
|
||||
const isRenderer = process && process.type === 'renderer';
|
||||
|
||||
// TODO: Remove electron
|
||||
const app = isRenderer ? require('@electron/remote').app : require('electron').app;
|
||||
const tmpFolder = app.getPath('temp');
|
||||
|
||||
const driverOptionsPath = tmpFolder + 'noodl-merge-driver-options.json';
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {{ reversed: boolean; }} options
|
||||
* @returns
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
/**
|
||||
* @param {{
|
||||
* reversed: boolean;
|
||||
* }} options
|
||||
* @returns
|
||||
*/
|
||||
writeMergeDriverOptions(options) {
|
||||
if (typeof options !== 'object') throw new Error('options is not an object');
|
||||
return fs.promises.writeFile(driverOptionsPath, JSON.stringify(options));
|
||||
},
|
||||
|
||||
cleanMergeDriverOptionsSync() {
|
||||
if (fs.existsSync(driverOptionsPath)) {
|
||||
fs.unlinkSync(driverOptionsPath);
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
*
|
||||
* @returns {{
|
||||
* reversed: boolean;
|
||||
* }}
|
||||
*/
|
||||
readMergeDriverOptionsSync() {
|
||||
try {
|
||||
if (fs.existsSync(driverOptionsPath)) {
|
||||
const options = fs.readFileSync(driverOptionsPath);
|
||||
return JSON.parse(options);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
};
|
||||
103
packages/noodl-git/src/merge-strategy.ts
Normal file
103
packages/noodl-git/src/merge-strategy.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import { IMergeTreeEntry, MergeTreeError } from './core/models/merge';
|
||||
import { getFileContents } from './core/cat-file';
|
||||
import { addAll } from './core/add';
|
||||
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
function tryParseJson(str: string) {
|
||||
try {
|
||||
return JSON.parse(str);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
type JSON_OBJECT = {
|
||||
[key: string]: any;
|
||||
};
|
||||
|
||||
/** Merge the project.json file */
|
||||
export type MergeStrategyFunc = (ancestors: JSON_OBJECT, ours: JSON_OBJECT, theirs: JSON_OBJECT) => JSON_OBJECT;
|
||||
|
||||
/**
|
||||
* Defines the Noodl custom merge strategy.
|
||||
*/
|
||||
export class MergeStrategy {
|
||||
constructor(
|
||||
public readonly repositoryDir: string,
|
||||
private readonly mergeProject: MergeStrategyFunc,
|
||||
public readonly strategy: 'our' | 'their' = 'our'
|
||||
) {}
|
||||
|
||||
public async solveConflicts(tree: MergeTreeError) {
|
||||
const projectJsonFiles = new Set<IMergeTreeEntry>(); //handle repos with multiple noodl projects
|
||||
const otherFiles = new Set<IMergeTreeEntry>();
|
||||
|
||||
for (const entry of tree.conflictedEntries) {
|
||||
if (entry.hasConflicts) {
|
||||
const parent = entry.base || entry.our || entry.their;
|
||||
if (parent.path === 'project.json' || parent.path.slice(-13) === '/project.json') {
|
||||
projectJsonFiles.add(entry);
|
||||
} else {
|
||||
otherFiles.add(entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve conflicts
|
||||
const projectPromises = Array.from(projectJsonFiles).map((x) => this.resolveProjectJsonConflict(x));
|
||||
const otherFilePromises = Array.from(otherFiles).map((x) => this.resolveFileConflict(x));
|
||||
|
||||
const allPromises = [...projectPromises, ...otherFilePromises];
|
||||
await Promise.all(allPromises);
|
||||
|
||||
// Mark all the files as resolved
|
||||
await addAll(this.repositoryDir);
|
||||
|
||||
// Return all files that were resolved
|
||||
return [...projectJsonFiles, ...otherFiles];
|
||||
}
|
||||
|
||||
private async resolveFileConflict(entry: IMergeTreeEntry) {
|
||||
const parent = entry.base || entry.our || entry.their;
|
||||
console.log('resolveFileConflict', parent.path);
|
||||
|
||||
const targetEntry = entry[this.strategy];
|
||||
console.log('vcs: resolving file conflict', targetEntry.path, 'using', this.strategy);
|
||||
const ourBlob = await getFileContents(this.repositoryDir, targetEntry.sha);
|
||||
|
||||
const absolutePath = path.join(this.repositoryDir, parent.path);
|
||||
// Write our version of the file
|
||||
await fs.promises.writeFile(absolutePath, ourBlob);
|
||||
}
|
||||
|
||||
private async resolveProjectJsonConflict(entry: IMergeTreeEntry) {
|
||||
const parent = entry.base || entry.our || entry.their;
|
||||
console.log('resolveProjectJsonConflict', parent.path);
|
||||
|
||||
const results = await Promise.all([
|
||||
entry.base ? getFileContents(this.repositoryDir, entry.base.sha) : Promise.resolve('{}'),
|
||||
entry.our ? getFileContents(this.repositoryDir, entry.our.sha) : Promise.resolve('{}'),
|
||||
entry.their ? getFileContents(this.repositoryDir, entry.their.sha) : Promise.resolve('{}')
|
||||
]);
|
||||
|
||||
const ancestor = tryParseJson(results[0].toString());
|
||||
const ours = tryParseJson(results[1].toString());
|
||||
const theirs = tryParseJson(results[2].toString());
|
||||
|
||||
if (!ancestor) {
|
||||
throw new Error("Failed to solve project.json conflict. Couldn't parse ancestor: " + results[0].toString());
|
||||
}
|
||||
if (!ours) {
|
||||
throw new Error("Failed to solve project.json conflict. Couldn't parse ours: " + results[1].toString());
|
||||
} else if (!theirs) {
|
||||
throw new Error("Failed to solve project.json conflict. Couldn't parse theirs: " + results[2].toString());
|
||||
}
|
||||
|
||||
const mergedResult = this.mergeProject(ancestor, ours, theirs);
|
||||
|
||||
const absolutePath = path.join(this.repositoryDir, parent.path);
|
||||
await fs.promises.writeFile(absolutePath, JSON.stringify(mergedResult));
|
||||
}
|
||||
}
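// --- Illustrative usage sketch, not part of the original commit ---
// Resolving a conflicted merge tree with a naive project.json merge callback.
// The callback below is a placeholder; the editor supplies a real three-way
// merge of the project graph.
import { Branch } from './core/models/branch';
import { ComputedAction } from './core/models/computed-action';
import { mergeTree } from './core/merge';

export async function exampleSolveConflicts(repositoryDir: string, ours: Branch, theirs: Branch): Promise<void> {
  const tree = await mergeTree(repositoryDir, ours, theirs);
  if (tree.kind === ComputedAction.Conflicts) {
    const strategy = new MergeStrategy(repositoryDir, (ancestor, mine, other) => ({ ...ancestor, ...other, ...mine }));
    await strategy.solveConflicts(tree);
  }
}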
|
||||
70
packages/noodl-git/src/models.ts
Normal file
70
packages/noodl-git/src/models.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
// These are the models used by Noodl right now
|
||||
import { ICloneProgress } from "./core/models/progress";
|
||||
|
||||
import { FileStatusKind } from "./core/models/status";
|
||||
|
||||
export type GitUserInfo = {
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
|
||||
export type GitStatus = {
|
||||
status: "new" | "modified" | "deleted" | "conflicted";
|
||||
path: string;
|
||||
};
|
||||
|
||||
export function ConvertStatusKindToGitStatus(
|
||||
x: FileStatusKind
|
||||
): GitStatus["status"] {
|
||||
switch (x) {
|
||||
case FileStatusKind.New:
|
||||
return "new";
|
||||
case FileStatusKind.Modified:
|
||||
return "modified";
|
||||
case FileStatusKind.Deleted:
|
||||
return "deleted";
|
||||
case FileStatusKind.Copied:
|
||||
return "modified";
|
||||
case FileStatusKind.Renamed:
|
||||
return "modified";
|
||||
case FileStatusKind.Conflicted:
|
||||
return "conflicted";
|
||||
case FileStatusKind.Untracked:
|
||||
return "new";
|
||||
}
|
||||
}
|
||||
|
||||
export type GitCommit = {
|
||||
sha: string;
|
||||
shortSha: string;
|
||||
message: string;
|
||||
date: Date;
|
||||
parentCount: number;
|
||||
author: {
|
||||
name: string;
|
||||
email: string;
|
||||
};
|
||||
|
||||
isLocalAhead: boolean;
|
||||
isRemoteAhead: boolean;
|
||||
};
|
||||
|
||||
export type GitBranch = {
|
||||
name: string;
|
||||
local: boolean;
|
||||
remote: boolean;
|
||||
};
|
||||
|
||||
export type GitCloneOptions = {
|
||||
url: string;
|
||||
directory: string;
|
||||
/** Clone a single branch; this is only for testing. */
|
||||
singleBranch?: boolean;
|
||||
onProgress?: (progress: ICloneProgress) => void;
|
||||
};
|
||||
|
||||
export class GitEmptyRepositoryError extends Error {
|
||||
constructor() {
|
||||
super("The repository you're trying to clone is empty.");
|
||||
}
|
||||
}
|
||||
57
packages/noodl-git/src/paths.ts
Normal file
57
packages/noodl-git/src/paths.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import path from "path";
|
||||
import { getDesktopTrampolinePath as desktopGetDesktopTrampolinePath } from "desktop-trampoline";
|
||||
|
||||
let cache_desktopTrampolinePath: string = null;
|
||||
|
||||
/**
|
||||
* Returns and caches the desktop trampoline path,
|
||||
* via "desktop-trampoline"'s helper method.
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
function getDesktopTrampolinePathHelper() {
|
||||
if (cache_desktopTrampolinePath !== null) return cache_desktopTrampolinePath;
|
||||
// getDesktopTrampolinePath takes about 5ms; caching it avoids paying that cost again.
|
||||
cache_desktopTrampolinePath = desktopGetDesktopTrampolinePath();
|
||||
|
||||
//this is required due to this limitation in electron: https://github.com/electron/electron/issues/8206
|
||||
//only affects production builds
|
||||
cache_desktopTrampolinePath = cache_desktopTrampolinePath.replace(
|
||||
"asar",
|
||||
"asar.unpacked"
|
||||
);
|
||||
|
||||
return cache_desktopTrampolinePath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the desktop trampoline path, where we expect it to be.
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
export function getDesktopTrampolinePath(): string {
|
||||
if (process.env.LOCAL_GIT_TRAMPOLINE_DIRECTORY) {
|
||||
return process.env.LOCAL_GIT_TRAMPOLINE_DIRECTORY;
|
||||
}
|
||||
|
||||
return path
|
||||
.resolve(
|
||||
__dirname,
|
||||
"..",
|
||||
"..",
|
||||
"node_modules",
|
||||
"desktop-trampoline",
|
||||
"build",
|
||||
"Release",
|
||||
"desktop-trampoline"
|
||||
)
|
||||
.replace(/[\\\/]app.asar[\\\/]/, "/app.asar.unpacked/");
|
||||
}
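// --- Illustrative note, not part of the original commit ---
// In a packaged build the replace() above rewrites a path such as
//   .../resources/app.asar/node_modules/desktop-trampoline/build/Release/desktop-trampoline
// into
//   .../resources/app.asar.unpacked/node_modules/desktop-trampoline/build/Release/desktop-trampoline
// so the native trampoline binary can be spawned from outside the asar archive.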
|
||||
|
||||
export function getGitPath(): string {
|
||||
// This is required because dugite is not imported as external package.
|
||||
// Where __dirname would otherwise be the package path.
|
||||
return path
|
||||
.resolve(__dirname, "..", "..", "node_modules", "dugite", "git")
|
||||
.replace(/[\\\/]app.asar[\\\/]/, "/app.asar.unpacked/");
|
||||
}
|
||||
3
packages/noodl-git/tsconfig.json
Normal file
3
packages/noodl-git/tsconfig.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json"
|
||||
}
|
||||
Block a user