Merge branch 'development' into wip-ok-cancel-dialog-buttons

This commit is contained in:
Markus Olsson 2019-11-05 15:47:09 +01:00
commit 9724e46716
363 changed files with 32853 additions and 10204 deletions

View file

@ -3,7 +3,7 @@ version: 2
defaults: &defaults
working_directory: ~/desktop/desktop
macos:
xcode: '9.3.0'
xcode: '9.4.1'
jobs:
build:

View file

@ -3,32 +3,5 @@ coverage:
round: nearest
# thresholds for red to green color coding
range: '10...80'
status:
project:
lib-git:
target: auto
threshold: 5%
base: pr
paths: 'app/src/lib/git'
lib-app:
target: auto
threshold: 5%
base: pr
paths:
- 'app/src/lib/stores'
- 'app/src/lib/databases'
models:
target: auto
threshold: 5%
base: pr
paths: 'app/src/models'
ui-components:
target: auto
threshold: 5%
base: pr
paths: 'app/src/ui'
patch: no
changes: no
status: off
comment: off

View file

@ -6,6 +6,10 @@ plugins:
- react
- json
settings:
react:
version: '16.3'
extends:
- prettier
- prettier/react
@ -36,8 +40,6 @@ rules:
# this rule now works but generates a lot of issues with the codebase
# '@typescript-eslint/member-ordering': error
'@typescript-eslint/type-annotation-spacing': error
# Babel
babel/no-invalid-this: error
@ -70,6 +72,7 @@ rules:
strict:
- error
- global
no-buffer-constructor: error
###########
# SPECIAL #

View file

@ -67,7 +67,7 @@ to see if the problem has already been reported. If it does exist, add a
:thumbsup: to the issue to indicate this is also an issue for you, and add a
comment to the existing issue if there is extra information you can contribute.
#### How Do I Submit A (Good) Bug Report?
#### How Do I Submit A Bug Report?
Bugs are tracked as [GitHub issues](https://guides.github.com/features/issues/).
@ -102,7 +102,7 @@ to see if the enhancement has already been suggested. If it has, add a
:thumbsup: to indicate your interest in it, or comment if there is additional
information you would like to add.
#### How Do I Submit A (Good) Enhancement Suggestion?
#### How Do I Submit An Enhancement Suggestion?
Enhancement suggestions are tracked as [GitHub issues](https://guides.github.com/features/issues/).
@ -150,4 +150,4 @@ These documents are useful resources for contributors to learn more about the p
- [Release Planning](https://github.com/desktop/desktop/blob/development/docs/process/release-planning.md)
- [Issue Triage](https://github.com/desktop/desktop/blob/development/docs/process/issue-triage.md)
- [Issue and Pull Request Labels](https://github.com/desktop/desktop/blob/development/docs/process/labels.md)
- [Pull Request Triage](https://github.com/desktop/desktop/blob/development/docs/process/pull-request-triage.md)
- [Pull Requests](https://github.com/desktop/desktop/blob/development/docs/process/pull-requests.md)

View file

@ -4,68 +4,37 @@ about: Report a problem encountered while using GitHub Desktop
---
<!--
First and foremost, we'd like to thank you for taking the time to contribute to our project. Before submitting your issue, please follow these steps:
### Describe the bug
1. Familiarize yourself with our contributing guide:
* https://github.com/desktop/desktop/blob/development/.github/CONTRIBUTING.md#contributing-to-github-desktop
2. Check if your issue (and sometimes workaround) is in the known-issues doc:
* https://github.com/desktop/desktop/blob/development/docs/known-issues.md
3. Make sure your issue isn't a duplicate of another issue
4. If you have made it to this step, go ahead and fill out the template below
-->
A clear and concise description of what the bug is.
## Description
<!--
Provide a detailed description of the behavior you're seeing or the behavior you'd like to see **below** this comment.
-->
### Version & OS
Open the 'About GitHub Desktop' menu to see the Desktop version. Also include what operating system you are using.
## Version
<!--
Place the version of GitHub Desktop you have installed **below** this comment. This is displayed under the 'About GitHub Desktop' menu item. If you are running from source, include the commit by running `git rev-parse HEAD` from the local repository.
-->
* GitHub Desktop:
<!--
Place the version of your operating system **below** this comment. The operating system you are running on may also help with reproducing the issue. If you are on macOS, launch 'About This Mac' and write down the OS version listed. If you are on Windows, open 'Command Prompt' and attach the output of this command: 'cmd /c ver'
-->
* Operating system:
### Steps to reproduce the behavior
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
## Steps to Reproduce
<!--
List the steps to reproduce your issue **below** this comment
ex,
1. `step 1`
2. `step 2`
3. `and so on…`
-->
### Expected behavior
### Expected Behavior
<!-- What you expected to happen -->
A clear and concise description of what you expected to happen.
### Actual Behavior
<!-- What actually happens -->
### Actual behavior
A clear and concise description of what actually happened.
## Additional Information
<!--
Place any additional information, configuration, or data that might be necessary to reproduce the issue **below** this comment.
### Screenshots
If you have screen shots or gifs that demonstrate the issue, please include them.
If the issue involves a specific public repository, including the information about it will make it easier to recreate the issue.
If you are dealing with a performance issue or regression, attaching a Performance profile of the task will help the developers understand the runtime behavior of the application on your machine.
https://github.com/desktop/desktop/blob/development/docs/contributing/timeline-profile.md
-->
Add screenshots to help explain your problem, if applicable.
### Logs
<!--
Attach your log file (You can simply drag your file here to insert it) to this issue. Please make sure the generated link to your log file is **below** this comment section otherwise it will not appear when you submit your issue.
macOS logs location: `~/Library/Application Support/GitHub Desktop/logs/*.desktop.production.log`
Windows logs location: `%APPDATA%\GitHub Desktop\logs\*.desktop.production.log`
Attach your logs by opening the `Help` menu and selecting `Show Logs...`, if applicable.
The log files are organized by date, so see if anything was generated for today's date.
-->
### Additional context
Add any other context about the problem here.

View file

@ -1,15 +1,17 @@
---
name: "\U0001F389 Problem to raise"
name: "\U00002B50 Submit a request or solve a problem"
about: Surface a problem that you think should be solved
---
**Please describe the problem you think should be solved**
A clear and concise description of what the problem is and who else might be impacted. Screenshots are encouraged.
### Describe the feature or problem you'd like to solve
Example:
A clear and concise description of what the feature or problem is.
> “When I run into a merge conflict, I don't know where to go to resolve it. Anyone who works off of multiple branches will likely run into this problem at least occasionally.”
### Proposed solution
**[Optional] Do you have any potential solutions in mind?**
A clear and concise description of one or more solutions you think might solve the problem. Please include any considered drawbacks or tradeoffs, and how users might use your solution(s). Screenshots or mockups are helpful here!
How will it benefit Desktop and its users?
### Additional context
Add any other context, such as screenshots or mockups, if applicable.

2
.github/config.yml vendored
View file

@ -13,7 +13,7 @@ requestInfoReplyComment: >
Thanks for understanding and meeting us halfway 😀
requestInfoLabelToAdd: more-information-needed
requestInfoLabelToAdd: more-info-needed
requestInfoOn:
pullRequest: false

View file

@ -1,40 +1,24 @@
## Overview
<!--
What issue are you addressing? (for example, #1234)
If an issue doesn't exist for this pull request (PR) to address, please open one
to allow for discussion before opening this PR.
You can open a new issue at https://github.com/desktop/desktop/issues/new/choose
What GitHub Desktop issue does this PR address? (for example, #1234)
-->
**Closes #{issue number}**
Closes #[issue number]
## Description
-
### Screenshots
<!--
If this PR touches the UI layer of the app, please include screenshots or animated gifs to show the changes.
-->
## Release notes
<!--
If this is related to a feature, bugfix or improvement, we'd love your help to
summarize these changes to assist with drafting the release notes when this pull
request is merged.
You can leave this blank if you're not sure.
If you don't believe this PR needs to be mentioned in the release notes, write "Notes: no-notes".
Some examples of changelog entries from earlier releases:
- Adds support for Python 3 in GitHub Desktop CLI for macOS users
- Fixes problem with commit being reset when switching between History and Changes tabs
- Fixes caret in co-author selector, which is hidden when dark theme is enabled
- Improves status parsing performance when handling thousands of changed files
-->
Notes:

1
.gitignore vendored
View file

@ -13,3 +13,4 @@ app/node_modules/
*.iml
.envrc
junit*.xml
*.swp

View file

@ -1 +1 @@
8.12.0
10.16.0

2
.nvmrc
View file

@ -1 +1 @@
v8.12.0
v10

View file

@ -12,5 +12,4 @@ app/coverage
app/static/common
app/test/fixtures
gemoji
*.json
*.md

View file

@ -1,2 +1,2 @@
python 2.7
nodejs 8.12.0
python 2.7.16
nodejs 10.15.3

View file

@ -1,52 +0,0 @@
notifications:
email:
on_success: never
on_failure: change
dist: trusty
os: linux
compiler: clang
env:
- CC=clang CXX=clang++ npm_config_clang=1
addons:
apt:
packages:
# this is required to compile keytar
- libsecret-1-dev
# this package is essential for testing Electron in a headless fashion
# https://github.com/electron/electron/blob/master/docs/tutorial/testing-on-headless-ci.md
- xvfb
branches:
only:
- development
- /^__release-.*/
language: node_js
node_js:
- '8.12'
cache:
yarn: true
timeout: 600
directories:
- $HOME/.electron
- .eslintcache
- $HOME/.cache/electron-builder
install:
- yarn install --force
script:
- yarn lint && yarn validate-changelog && yarn check-modified && yarn
build:prod && yarn test:setup && yarn test
after_success:
- yarn test:report
after_failure:
- yarn test:report
- yarn test:review

View file

@ -2,8 +2,7 @@
"recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin",
"msjsdiag.debugger-for-chrome",
"samverschueren.final-newline",
"DmitryDorofeev.empty-indent",
"esbenp.prettier-vscode"
"esbenp.prettier-vscode",
"dbaeumer.vscode-eslint"
]
}

10
.vscode/launch.json vendored
View file

@ -14,12 +14,12 @@
],
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"env":{
"env": {
"ELECTRON_RUN_AS_NODE": "1"
},
"windows": {
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/electron.cmd"
},
}
},
{
"type": "node",
@ -35,12 +35,12 @@
],
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"env":{
"env": {
"ELECTRON_RUN_AS_NODE": "1"
},
"windows": {
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/electron.cmd"
},
},
}
}
]
}

17
.vscode/settings.json vendored
View file

@ -1,10 +1,11 @@
{
"typescript.tsdk": "./node_modules/typescript/lib",
"search.exclude": {
"**/node_modules": true,
".awcache": true,
"**/dist": true,
"**/node_modules": true,
"**/out": true,
".awcache": true
"app/test/fixtures": true
},
"files.exclude": {
"**/.git": true,
@ -24,5 +25,15 @@
"prettier.trailingComma": "es5",
"editor.formatOnSave": true,
"prettier.ignorePath": ".prettierignore",
"tslint.ignoreDefinitionFiles": true
"tslint.ignoreDefinitionFiles": true,
"eslint.options": {
"configFile": ".eslintrc.yml",
"rulePaths": ["eslint-rules"]
},
"eslint.validate": [
"javascript",
"javascriptreact",
"typescript",
"typescriptreact"
]
}

View file

@ -1,4 +1,4 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
yarn-path "./vendor/yarn-1.15.2.js"
yarn-path "./vendor/yarn-1.17.3.js"

View file

@ -21,13 +21,6 @@ Download the official installer for your operating system:
- [Windows](https://central.github.com/deployments/desktop/desktop/latest/win32)
- [Windows machine-wide install](https://central.github.com/deployments/desktop/desktop/latest/win32?format=msi)
There are several community-supported package managers that can be used to install Github Desktop.
- Windows users can install using [Chocolatey](https://chocolatey.org/) package manager:
`c:\> choco install github-desktop`
- macOS users can install using [Homebrew](https://brew.sh/) package manager:
`$ brew cask install github`
- Arch Linux users can install the latest version from the [AUR](https://aur.archlinux.org/packages/github-desktop/).
You can install this alongside your existing GitHub Desktop for Mac or GitHub
Desktop for Windows application.
@ -43,6 +36,21 @@ beta channel to get access to early builds of Desktop:
- [macOS](https://central.github.com/deployments/desktop/desktop/latest/darwin?env=beta)
- [Windows](https://central.github.com/deployments/desktop/desktop/latest/win32?env=beta)
### Community Releases
There are several community-supported package managers that can be used to
install GitHub Desktop:
- Windows users can install using [Chocolatey](https://chocolatey.org/) package manager:
`c:\> choco install github-desktop`
- macOS users can install using [Homebrew](https://brew.sh/) package manager:
`$ brew cask install github`
Installers for various Linux distributions can be found on the
[`shiftkey/desktop`](https://github.com/shiftkey/desktop) fork.
Arch Linux users can install the latest version from the
[AUR](https://aur.archlinux.org/packages/github-desktop-bin/).
## Is GitHub Desktop right for me? What are the primary areas of focus?
[This document](https://github.com/desktop/desktop/blob/development/docs/process/what-is-desktop.md) describes the focus of GitHub Desktop and who the product is most useful for.

View file

@ -1,4 +1,4 @@
runtime = electron
disturl = https://atom.io/download/electron
target = 3.1.6
target = 5.0.6
arch = x64

View file

@ -1,4 +1,4 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
yarn-path "../vendor/yarn-1.15.2.js"
yarn-path "../vendor/yarn-1.17.3.js"

View file

@ -3,7 +3,7 @@
"productName": "GitHub Desktop",
"bundleID": "com.github.GitHubClient",
"companyName": "GitHub, Inc.",
"version": "1.7.0-beta1",
"version": "2.2.2",
"main": "./main.js",
"repository": {
"type": "git",
@ -26,15 +26,16 @@
"deep-equal": "^1.0.1",
"dexie": "^2.0.0",
"double-ended-queue": "^2.1.0-0",
"dugite": "1.87.0",
"electron-window-state": "^4.0.2",
"dugite": "1.87.2",
"electron-window-state": "^5.0.3",
"event-kit": "^2.0.0",
"file-uri-to-path": "0.0.2",
"file-url": "^2.0.2",
"fs-admin": "^0.3.0",
"fs-admin": "^0.3.1",
"fs-extra": "^6.0.0",
"fuzzaldrin-plus": "^0.6.0",
"keytar": "^4.4.1",
"mem": "^4.3.0",
"memoize-one": "^4.0.3",
"moment": "^2.24.0",
"mri": "^1.1.0",
@ -48,7 +49,7 @@
"react-dom": "^16.3.2",
"react-transition-group": "^1.2.0",
"react-virtualized": "^9.20.0",
"registry-js": "^1.0.7",
"registry-js": "^1.4.0",
"source-map-support": "^0.4.15",
"strip-ansi": "^4.0.0",
"textarea-caret": "^3.0.2",

View file

@ -1,7 +1,9 @@
import chalk from 'chalk'
import * as Path from 'path'
import { ICommandModule, mriArgv } from '../load-commands'
import { openDesktop } from '../open-desktop'
import { parseRemote } from '../../lib/remote-parsing'
const command: ICommandModule = {
command: 'open <path>',
@ -21,9 +23,18 @@ const command: ICommandModule = {
openDesktop()
return
}
const repositoryPath = Path.resolve(process.cwd(), pathArg)
const url = `openLocalRepo/${encodeURIComponent(repositoryPath)}`
openDesktop(url)
// Check if the pathArg is a remote URL
if (parseRemote(pathArg) != null) {
console.log(
`\nYou cannot open a remote URL in GitHub Desktop\n` +
`Use \`${chalk.bold(`git clone ` + pathArg)}\`` +
` instead to initiate the clone`
)
} else {
const repositoryPath = Path.resolve(process.cwd(), pathArg)
const url = `openLocalRepo/${encodeURIComponent(repositoryPath)}`
openDesktop(url)
}
},
}
export = command

View file

@ -70,6 +70,7 @@ main {
display: flex;
flex-direction: column;
flex-grow: 1;
min-height: 0;
.footer {
flex: none;

View file

@ -113,6 +113,7 @@ const extensionModes: ReadonlyArray<IModeDefinition> = [
'.vcxproj': 'text/xml',
'.vbproj': 'text/xml',
'.svg': 'text/xml',
'.resx': 'text/xml',
},
},
{
@ -265,6 +266,126 @@ const extensionModes: ReadonlyArray<IModeDefinition> = [
'.jl': 'text/x-julia',
},
},
{
install: () => import('codemirror/mode/stex/stex'),
mappings: {
'.tex': 'text/x-stex',
},
},
{
install: () => import('codemirror/mode/sparql/sparql'),
mappings: {
'.rq': 'application/sparql-query',
},
},
{
install: () => import('codemirror/mode/stylus/stylus'),
mappings: {
'.styl': 'text/x-styl',
},
},
{
install: () => import('codemirror/mode/soy/soy'),
mappings: {
'.soy': 'text/x-soy',
},
},
{
install: () => import('codemirror/mode/smalltalk/smalltalk'),
mappings: {
'.st': 'text/x-stsrc',
},
},
{
install: () => import('codemirror/mode/slim/slim'),
mappings: {
'.slim': 'application/x-slim',
},
},
{
install: () => import('codemirror/mode/sieve/sieve'),
mappings: {
'.sieve': 'application/sieve',
},
},
{
install: () => import('codemirror/mode/scheme/scheme'),
mappings: {
'.ss': 'text/x-scheme',
'.sls': 'text/x-scheme',
'.scm': 'text/x-scheme',
},
},
{
install: () => import('codemirror/mode/rst/rst'),
mappings: {
'.rst': 'text/x-rst',
},
},
{
install: () => import('codemirror/mode/rpm/rpm'),
mappings: {
'.rpm': 'text/x-rpm-spec',
},
},
{
install: () => import('codemirror/mode/q/q'),
mappings: {
'.q': 'text/x-q',
},
},
{
install: () => import('codemirror/mode/puppet/puppet'),
mappings: {
'.pp': 'text/x-puppet',
},
},
{
install: () => import('codemirror/mode/pug/pug'),
mappings: {
'.pug': 'text/x-pug',
},
},
{
install: () => import('codemirror/mode/protobuf/protobuf'),
mappings: {
'.proto': 'text/x-protobuf',
},
},
{
install: () => import('codemirror/mode/properties/properties'),
mappings: {
'.properties': 'text/x-properties',
'.gitattributes': 'text/x-properties',
'.gitignore': 'text/x-properties',
'.editorconfig': 'text/x-properties',
'.ini': 'text/x-ini',
},
},
{
install: () => import('codemirror/mode/pig/pig'),
mappings: {
'.pig': 'text/x-pig',
},
},
{
install: () => import('codemirror/mode/asciiarmor/asciiarmor'),
mappings: {
'.pgp': 'application/pgp',
},
},
{
install: () => import('codemirror/mode/oz/oz'),
mappings: {
'.oz': 'text/x-oz',
},
},
{
install: () => import('codemirror/mode/pascal/pascal'),
mappings: {
'.pas': 'text/x-pascal',
},
},
]
/**

View file

@ -14,6 +14,50 @@ import { uuid } from './uuid'
import { getAvatarWithEnterpriseFallback } from './gravatar'
import { getDefaultEmail } from './email'
/**
* Optional set of configurable settings for the fetchAll method
*/
interface IFetchAllOptions<T> {
/**
* The number of results to ask for on each page when making
* requests to paged API endpoints.
*/
perPage?: number
/**
* An optional predicate which determines whether or not to
* continue loading results from the API. This can be used
* to put a limit on the number of results to return from
* a paged API resource.
*
* As an example, to stop loading results after 500 results:
*
* `(results) => results.length < 500`
*
* @param results All results retrieved thus far
*/
continue?: (results: ReadonlyArray<T>) => boolean
/**
* Calculate the next page path given the response.
*
* Optional, see `getNextPagePathFromLink` for the default
* implementation.
*/
getNextPagePath?: (response: Response) => string | null
/**
* Whether or not to silently suppress request errors and
* return the results retrieved thus far. If this field is
* `true` the fetchAll method will suppress errors (this is
* also the default behavior if no value is provided for
* this field). Setting this field to false will cause the
* fetchAll method to throw if it encounters an API error
* on any page.
*/
suppressErrors?: boolean
}
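A minimal sketch of how these options compose; the 500-item cap and the `cappedOptions` name are illustrative and not taken from the codebase:

// Illustrative only: cap a paged fetch at roughly 500 pull requests while
// surfacing any API error instead of silently returning a partial list.
const cappedOptions: IFetchAllOptions<IAPIPullRequest> = {
  // Matches the default page size used by fetchAll.
  perPage: 100,
  // Stop requesting further pages once 500 results have been buffered.
  continue: results => results.length < 500,
  // Throw on any non-OK response rather than returning what we have.
  suppressErrors: false,
}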
const username: () => Promise<string> = require('username')
const ClientID = process.env.TEST_ENV ? '' : __OAUTH_CLIENT_ID__
@ -27,8 +71,14 @@ if (!ClientID || !ClientID.length || !ClientSecret || !ClientSecret.length) {
type GitHubAccountType = 'User' | 'Organization'
/** The OAuth scopes we need. */
const Scopes = ['repo', 'user']
/** The OAuth scopes we want to request from GitHub.com. */
const DotComOAuthScopes = ['repo', 'user', 'workflow']
/**
* The OAuth scopes we want to request from GitHub
* Enterprise Server.
*/
const EnterpriseOAuthScopes = ['repo', 'user']
enum HttpStatusCode {
NotModified = 304,
@ -129,6 +179,12 @@ export interface IAPIMentionableUser {
readonly name: string
}
/**
* Error thrown by `fetchUpdatedPullRequests` when receiving more results than
* what the `maxResults` parameter allows for.
*/
export class MaxResultsError extends Error {}
/**
* `null` can be returned by the API for legacy reasons. A non-null value is
* set for the primary email address currently, but in the future visibility
@ -176,6 +232,23 @@ export interface IAPIRefStatus {
readonly statuses: ReadonlyArray<IAPIRefStatusItem>
}
/** Branch information returned by the GitHub API */
export interface IAPIBranch {
/**
* The name of the branch stored on the remote.
*
* NOTE: this is NOT a fully-qualified ref (i.e. `refs/heads/master`)
*/
readonly name: string
/**
* Branch protection settings:
*
* - `true` indicates that the branch is protected in some way
* - `false` indicates no branch protection set
*/
readonly protected: boolean
}
interface IAPIPullRequestRef {
readonly ref: string
readonly sha: string
@ -192,9 +265,11 @@ export interface IAPIPullRequest {
readonly number: number
readonly title: string
readonly created_at: string
readonly updated_at: string
readonly user: IAPIIdentity
readonly head: IAPIPullRequestRef
readonly base: IAPIPullRequestRef
readonly state: 'open' | 'closed'
}
/** The metadata about a GitHub server. */
@ -235,7 +310,7 @@ interface ISearchResults<T> {
*
* If no link rel next header is found this method returns null.
*/
function getNextPagePath(response: Response): string | null {
function getNextPagePathFromLink(response: Response): string | null {
const linkHeader = response.headers.get('Link')
if (!linkHeader) {
@ -255,6 +330,91 @@ function getNextPagePath(response: Response): string | null {
return null
}
/**
* Parses the 'next' Link header from GitHub using
* `getNextPagePathFromLink`. Unlike `getNextPagePathFromLink`
* this method will attempt to double the page size when
* the current page index and the page size allows for it
* leading to a ramp up in page size.
*
* This might sound confusing, and it is, but the primary use
* case for this is when retrieving updated PRs. By specifying
* an initial page size of, for example, 10 this method will
* increase the page size to 20 once the second page has been
* loaded. See the table below for an example. The ramp-up
* will stop at a page size of 100 since that's the maximum
* that the GitHub API supports.
*
* ```
* |-----------|------|-----------|-----------------|
* | Request # | Page | Page size | Retrieved items |
* |-----------|------|-----------|-----------------|
* | 1 | 1 | 10 | 10 |
* | 2 | 2 | 10 | 20 |
* | 3 | 2 | 20 | 40 |
* | 4 | 2 | 40 | 80 |
* | 5 | 2 | 80 | 160 |
* | 6 | 3 | 80 | 240 |
* | 7 | 4 | 80 | 320 |
* | 8 | 5 | 80 | 400 |
* | 9 | 5 | 100 | 500 |
* |-----------|------|-----------|-----------------|
* ```
* This algorithm means we can have the best of both worlds.
* If there's a small number of changed pull requests since
* our last update we'll do small requests that use minimal
* bandwidth but if we encounter a repository where a lot
* of PRs have changed since our last fetch (like a very
* active repository or one we haven't fetched in a long time)
* we'll spool up our page size in just a few requests and load
* in bulk.
*
* As an example I used a very active internal repository and
* asked for all PRs updated in the last 24 hours which was 320.
* With the previous regime of fetching with a page size of 10
* that obviously took 32 requests. With this new regime it
* would take 7.
*/
export function getNextPagePathWithIncreasingPageSize(response: Response) {
const nextPath = getNextPagePathFromLink(response)
if (!nextPath) {
return null
}
const { pathname, query } = URL.parse(nextPath, true)
const { per_page, page } = query
const pageSize = typeof per_page === 'string' ? parseInt(per_page, 10) : NaN
const pageNumber = typeof page === 'string' ? parseInt(page, 10) : NaN
if (!pageSize || !pageNumber) {
return nextPath
}
// Confusing, but we're looking at the _next_ page path here
// so the current is whatever came before it.
const currentPage = pageNumber - 1
// Number of received items thus far
const received = currentPage * pageSize
// Can't go above 100, that's the max the API will allow.
const nextPageSize = Math.min(100, pageSize * 2)
// Have we received exactly the amount of items
// such that doubling the page size and loading the
// second page would seamlessly fit? No sense going
// above 100 since that's the max the API supports
if (pageSize !== nextPageSize && received % nextPageSize === 0) {
query.per_page = `${nextPageSize}`
query.page = `${received / nextPageSize + 1}`
return URL.format({ pathname, query })
}
return nextPath
}
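Worked through for a single step, assuming a 'next' link of `.../pulls?per_page=10&page=3`, the arithmetic below mirrors the checks above and reproduces request #3 in the table:

// Illustration only: the literals mirror a next path of per_page=10&page=3.
function rampUpExample() {
  const pageSize = 10 // per_page on the next path
  const pageNumber = 3 // page on the next path
  const currentPage = pageNumber - 1 // two pages have already been loaded
  const received = currentPage * pageSize // 20 items buffered so far
  const nextPageSize = Math.min(100, pageSize * 2) // 20
  // 20 % 20 === 0, so the buffered items map cleanly onto one 20-item page
  // and the next request is rewritten to per_page=20&page=2, yielding 40
  // items once it completes, matching request #3 in the table.
  return pageSize !== nextPageSize && received % nextPageSize === 0 // true
}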
/**
* Returns an ISO 8601 time string with second resolution instead of
* the standard javascript toISOString which returns millisecond
@ -419,7 +579,7 @@ export class API {
throw new Error(
`Unable to create repository for organization '${
org.login
}'. Verify it exists and that you have permission to create a repository there.`
}'. Verify that it exists, that it's a paid organization, and that you have permission to create a repository there.`
)
}
throw e
@ -461,18 +621,84 @@ export class API {
}
}
/** Fetch the pull requests in the given repository. */
public async fetchPullRequests(
/** Fetch all open pull requests in the given repository. */
public async fetchAllOpenPullRequests(owner: string, name: string) {
const url = urlWithQueryString(`repos/${owner}/${name}/pulls`, {
state: 'open',
})
try {
return await this.fetchAll<IAPIPullRequest>(url)
} catch (e) {
log.warn(`failed fetching open PRs for repository ${owner}/${name}`, e)
throw e
}
}
/**
* Fetch all pull requests in the given repository that have been
* updated on or after the provided date.
*
* Note: The GitHub API doesn't support providing a last-updated
* limitation for PRs like it does for issues so we're emulating
* the issues API by sorting PRs descending by last updated and
* only grabbing as many pages as we need until we no longer receive
* PRs that have been updated more recently than the `since`
* parameter.
*
* If there's more than `maxResults` updated PRs since the last time
* we fetched this method will throw an error such that we can abort
* this strategy and commence loading all open PRs instead.
*/
public async fetchUpdatedPullRequests(
owner: string,
name: string,
state: 'open' | 'closed' | 'all'
): Promise<ReadonlyArray<IAPIPullRequest>> {
const url = urlWithQueryString(`repos/${owner}/${name}/pulls`, { state })
since: Date,
// 320 is chosen because with a ramp-up page size starting with
// a page size of 10 we'll reach 320 in exactly 7 pages. See
// getNextPagePathWithIncreasingPageSize
maxResults = 320
) {
const sinceTime = since.getTime()
const url = urlWithQueryString(`repos/${owner}/${name}/pulls`, {
state: 'all',
sort: 'updated',
direction: 'desc',
})
try {
const prs = await this.fetchAll<IAPIPullRequest>(url)
return prs
const prs = await this.fetchAll<IAPIPullRequest>(url, {
// We use a page size smaller than our default 100 here because we
// expect that the majority use case will return much less than
// 100 results. Given that as long as _any_ PR has changed we'll
// get the full list back (the PRs endpoint doesn't support ?since=) we want
// to keep this number fairly conservative in order to not use
// up bandwidth needlessly while balancing it such that we don't
// have to use a lot of requests to update our database. We then
// ramp up the page size (see getNextPagePathWithIncreasingPageSize)
// if it turns out there's a lot of updated PRs.
perPage: 10,
getNextPagePath: getNextPagePathWithIncreasingPageSize,
continue(results) {
if (results.length >= maxResults) {
throw new MaxResultsError('got max pull requests, aborting')
}
// Given that we sort the results in descending order by their
// updated_at field we can safely say that if the last item
// is modified after our sinceTime then we haven't reached the
// end of updated PRs.
const last = results[results.length - 1]
return last !== undefined && Date.parse(last.updated_at) > sinceTime
},
// We can't ignore errors here as that might mean that we haven't
// retrieved enough pages to fully capture the changes since the
// last time we updated. Ignoring errors here would mean that we'd
// store an incorrect lastUpdated field in the database.
suppressErrors: false,
})
return prs.filter(pr => Date.parse(pr.updated_at) >= sinceTime)
} catch (e) {
log.warn(`fetchPullRequests: failed for repository ${owner}/${name}`, e)
log.warn(`failed fetching updated PRs for repository ${owner}/${name}`, e)
throw e
}
}
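A minimal caller sketch showing the fallback described above; the `refreshPullRequests` helper, the `desktop/desktop` slug, and the `api`/`lastUpdated` parameters are assumptions for illustration:

// Hypothetical caller: fall back to a full fetch of open PRs when the
// incremental strategy bails out with MaxResultsError.
async function refreshPullRequests(api: API, lastUpdated: Date) {
  try {
    // Fast path: only PRs updated since the last successful fetch.
    return await api.fetchUpdatedPullRequests('desktop', 'desktop', lastUpdated)
  } catch (e) {
    if (e instanceof MaxResultsError) {
      // Too many PRs changed to page through incrementally; reload the
      // full set of open PRs instead.
      return api.fetchAllOpenPullRequests('desktop', 'desktop')
    }
    throw e
  }
}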
@ -493,6 +719,23 @@ export class API {
return await parsedResponse<IAPIRefStatus>(response)
}
public async fetchProtectedBranches(
owner: string,
name: string
): Promise<ReadonlyArray<IAPIBranch>> {
const path = `repos/${owner}/${name}/branches?protected=true`
try {
const response = await this.request('GET', path)
return await parsedResponse<IAPIBranch[]>(response)
} catch (err) {
log.info(
`[fetchProtectedBranches] unable to list protected branches`,
err
)
return new Array<IAPIBranch>()
}
}
/**
* Authenticated requests to a paginating resource such as issues.
*
@ -500,31 +743,28 @@ export class API {
* pages when available, buffers all items and returns them in
* one array when done.
*/
private async fetchAll<T>(path: string): Promise<ReadonlyArray<T>> {
private async fetchAll<T>(path: string, options?: IFetchAllOptions<T>) {
const buf = new Array<T>()
const opts: IFetchAllOptions<T> = { perPage: 100, ...options }
const params = { per_page: `${opts.perPage}` }
const params = {
per_page: '100',
}
let nextPath: string | null = urlWithQueryString(path, params)
do {
const response = await this.request('GET', nextPath)
if (response.status === HttpStatusCode.NotFound) {
log.warn(`fetchAll: '${path}' returned a 404`)
return []
}
if (response.status === HttpStatusCode.NotModified) {
log.warn(`fetchAll: '${path}' returned a 304`)
return []
const response: Response = await this.request('GET', nextPath)
if (opts.suppressErrors !== false && !response.ok) {
log.warn(`fetchAll: '${path}' returned a ${response.status}`)
return buf
}
const items = await parsedResponse<ReadonlyArray<T>>(response)
if (items) {
buf.push(...items)
}
nextPath = getNextPagePath(response)
} while (nextPath)
nextPath = opts.getNextPagePath
? opts.getNextPagePath(response)
: getNextPagePathFromLink(response)
} while (nextPath && (!opts.continue || opts.continue(buf)))
return buf
}
@ -631,6 +871,11 @@ export enum AuthorizationResponseKind {
PersonalAccessTokenBlocked,
Error,
EnterpriseTooOld,
/**
* The API has indicated that the user is required to go through
* the web authentication flow.
*/
WebFlowRequired,
}
export type AuthorizationResponse =
@ -644,6 +889,7 @@ export type AuthorizationResponse =
| { kind: AuthorizationResponseKind.UserRequiresVerification }
| { kind: AuthorizationResponseKind.PersonalAccessTokenBlocked }
| { kind: AuthorizationResponseKind.EnterpriseTooOld }
| { kind: AuthorizationResponseKind.WebFlowRequired }
/**
* Create an authorization with the given login, password, and one-time
@ -667,7 +913,7 @@ export async function createAuthorization(
'POST',
'authorizations',
{
scopes: Scopes,
scopes: getOAuthScopesForEndpoint(endpoint),
client_id: ClientID,
client_secret: ClientSecret,
note: note,
@ -724,6 +970,8 @@ export async function createAuthorization(
) {
// Authorization API does not support providing personal access tokens
return { kind: AuthorizationResponseKind.PersonalAccessTokenBlocked }
} else if (response.status === 410) {
return { kind: AuthorizationResponseKind.WebFlowRequired }
} else if (response.status === 422) {
if (apiError.errors) {
for (const error of apiError.errors) {
@ -844,7 +1092,7 @@ export function getHTMLURL(endpoint: string): string {
// In the case of GitHub.com, the HTML site lives on the parent domain.
// E.g., https://api.github.com -> https://github.com
//
// Whereas with Enterprise, it lives on the same domain but without the
// Whereas with Enterprise Server, it lives on the same domain but without the
// API path:
// E.g., https://github.mycompany.com/api/v3 -> https://github.mycompany.com
//
@ -895,7 +1143,8 @@ export function getOAuthAuthorizationURL(
state: string
): string {
const urlBase = getHTMLURL(endpoint)
const scope = encodeURIComponent(Scopes.join(' '))
const scopes = getOAuthScopesForEndpoint(endpoint)
const scope = encodeURIComponent(scopes.join(' '))
return `${urlBase}/login/oauth/authorize?client_id=${ClientID}&scope=${scope}&state=${state}`
}
@ -923,3 +1172,9 @@ export async function requestOAuthToken(
return null
}
}
function getOAuthScopesForEndpoint(endpoint: string) {
return endpoint === getDotComAPIEndpoint()
? DotComOAuthScopes
: EnterpriseOAuthScopes
}

View file

@ -4,7 +4,7 @@ import { IDiff, ImageDiffType } from '../models/diff'
import { Repository, ILocalRepositoryState } from '../models/repository'
import { Branch, IAheadBehind } from '../models/branch'
import { Tip } from '../models/tip'
import { Commit } from '../models/commit'
import { Commit, CommitOneLine } from '../models/commit'
import { CommittedFileChange, WorkingDirectoryStatus } from '../models/status'
import { CloningRepository } from '../models/cloning-repository'
import { IMenu } from '../models/app-menu'
@ -35,6 +35,10 @@ import { ApplicationTheme } from '../ui/lib/application-theme'
import { IAccountRepositories } from './stores/api-repositories-store'
import { ManualConflictResolution } from '../models/manual-conflict-resolution'
import { Banner } from '../models/banner'
import { GitRebaseProgress } from '../models/rebase'
import { RebaseFlowStep } from '../models/rebase-flow-step'
import { IStashEntry } from '../models/stash-entry'
import { TutorialStep } from '../models/tutorial-step'
export enum SelectionType {
Repository,
@ -149,8 +153,8 @@ export interface IAppState {
/** The width of the commit summary column in the history view */
readonly commitSummaryWidth: number
/** Whether we should hide the toolbar (and show inverted window controls) */
readonly titleBarStyle: 'light' | 'dark'
/** The width of the files list in the stash view */
readonly stashedFilesWidth: number
/**
* Used to highlight access keys throughout the app when the
@ -171,8 +175,10 @@ export interface IAppState {
readonly askForConfirmationOnForcePush: boolean
/** The external editor to use when opening repositories */
readonly selectedExternalEditor?: ExternalEditor
readonly selectedExternalEditor: ExternalEditor | null
/** The current setting for whether the user has disable usage reports */
readonly optOutOfUsageTracking: boolean
/**
* A cached entry representing an external editor found on the user's machine:
*
@ -186,6 +192,9 @@ export interface IAppState {
/** What type of visual diff mode we should use to compare images */
readonly imageDiffType: ImageDiffType
/** Whether we should hide white space changes in diff */
readonly hideWhitespaceInDiff: boolean
/** The user's preferred shell. */
readonly selectedShell: Shell
@ -219,6 +228,9 @@ export interface IAppState {
* See the ApiRepositoriesStore for more details on loading repositories
*/
readonly apiRepositories: ReadonlyMap<Account, IAccountRepositories>
/** Which step the user is on in the Onboarding Tutorial */
readonly currentOnboardingTutorialStep: TutorialStep
}
export enum FoldoutType {
@ -248,10 +260,20 @@ export type AppMenuFoldout = {
openedWithAccessKey?: boolean
}
export type BranchFoldout = {
type: FoldoutType.Branch
/**
* A flag to indicate the user clicked the "switch branch" link when they
* saw the prompt about the current branch being protected.
*/
handleProtectedBranchWarning?: boolean
}
export type Foldout =
| { type: FoldoutType.Repository }
| { type: FoldoutType.Branch }
| { type: FoldoutType.AddMenu }
| BranchFoldout
| AppMenuFoldout
export enum RepositorySectionTab {
@ -341,6 +363,8 @@ export interface IRepositoryState {
readonly branchesState: IBranchesState
readonly rebaseState: IRebaseState
/**
* Mapping from lowercased email addresses to the associated GitHub user. Note
* that an email address may not have an associated GitHub user, or the user
@ -396,12 +420,6 @@ export interface IRepositoryState {
* null if no such operation is in flight.
*/
readonly revertProgress: IRevertProgress | null
/** The current branch filter text. */
readonly branchFilterText: string
/** The current pull request filter text. */
readonly pullRequestFilterText: string
}
export interface IBranchesState {
@ -455,6 +473,39 @@ export interface IBranchesState {
readonly rebasedBranches: ReadonlyMap<string, string>
}
/** State associated with a rebase being performed on a repository */
export interface IRebaseState {
/**
* The current step of the flow the user should see.
*
* `null` indicates that there is no rebase underway.
*/
readonly step: RebaseFlowStep | null
/**
* The underlying Git information associated with the current rebase
*
* This will be set to `null` when no base branch has been selected to
* initiate the rebase.
*/
readonly progress: GitRebaseProgress | null
/**
* The known range of commits that will be applied to the repository
*
* This will be set to `null` when no base branch has been selected to
* initiate the rebase.
*/
readonly commits: ReadonlyArray<CommitOneLine> | null
/**
* Whether the user has done work to resolve any conflicts as part of this
* rebase, as the rebase flow should confirm the user wishes to abort the
* rebase and lose that work.
*/
readonly userHasResolvedConflicts: boolean
}
export interface ICommitSelection {
/** The commit currently selected in the app */
readonly sha: string | null
@ -469,16 +520,38 @@ export interface ICommitSelection {
readonly diff: IDiff | null
}
export interface IChangesState {
readonly workingDirectory: WorkingDirectoryStatus
export enum ChangesSelectionKind {
WorkingDirectory = 'WorkingDirectory',
Stash = 'Stash',
}
export type ChangesWorkingDirectorySelection = {
readonly kind: ChangesSelectionKind.WorkingDirectory
/**
* The ID of the selected files. The files themselves can be looked up in
* `workingDirectory`.
* the `workingDirectory` property in `IChangesState`.
*/
readonly selectedFileIDs: string[]
readonly diff: IDiff | null
}
export type ChangesStashSelection = {
readonly kind: ChangesSelectionKind.Stash
/** Currently selected file in the stash diff viewer UI (aka the file we want to show the diff for) */
readonly selectedStashedFile: CommittedFileChange | null
/** Currently selected file's diff */
readonly selectedStashedFileDiff: IDiff | null
}
export type ChangesSelection =
| ChangesWorkingDirectorySelection
| ChangesStashSelection
export interface IChangesState {
readonly workingDirectory: WorkingDirectoryStatus
/** The commit message for a work-in-progress commit in the changes view. */
readonly commitMessage: ICommitMessage
@ -504,6 +577,23 @@ export interface IChangesState {
* The absence of a value means there is no merge or rebase conflict underway
*/
readonly conflictState: ConflictState | null
/**
* The latest GitHub Desktop stash entry for the current branch, or `null`
* if no stash exists for the current branch.
*/
readonly stashEntry: IStashEntry | null
/**
* The current selection state in the Changes view. Can be either
* working directory or a stash. In the case of a working directory
* selection multiple files may be selected. See `ChangesSelection`
* for more information about the differences between the two.
*/
readonly selection: ChangesSelection
/** `true` if the GitHub API reports that the branch is protected */
readonly currentBranchProtected: boolean
}
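A small sketch of how consuming code might narrow the `ChangesSelection` union by its `kind` discriminant; the `describeSelection` helper is hypothetical:

// Hypothetical helper narrowing ChangesSelection via its kind field.
function describeSelection(selection: ChangesSelection): string {
  if (selection.kind === ChangesSelectionKind.WorkingDirectory) {
    // Narrowed to ChangesWorkingDirectorySelection here.
    return `${selection.selectedFileIDs.length} working directory file(s) selected`
  }
  // Narrowed to ChangesStashSelection here.
  return selection.selectedStashedFile !== null
    ? `stashed file ${selection.selectedStashedFile.path} selected`
    : 'stash entry selected'
}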
/**

View file

@ -1,13 +1,14 @@
import Dexie from 'dexie'
import { APIRefState, IAPIRefStatusItem } from '../api'
import { BaseDatabase } from './base-database'
import { GitHubRepository } from '../../models/github-repository'
import { fatalError, forceUnwrap } from '../fatal-error'
export interface IPullRequestRef {
/**
* The database ID of the GitHub repository in which this ref lives. It could
* be null if the repository was deleted on the site after the PR was opened.
*/
readonly repoId: number | null
readonly repoId: number
/** The name of the ref. */
readonly ref: string
@ -17,12 +18,6 @@ export interface IPullRequestRef {
}
export interface IPullRequest {
/**
* The database ID. This will be undefined if the pull request hasn't been
* inserted into the DB.
*/
readonly id?: number
/** The GitHub PR number. */
readonly number: number
@ -32,6 +27,9 @@ export interface IPullRequest {
/** The string formatted date on which the PR was created. */
readonly createdAt: string
/** The string formatted date on which the PR was last updated. */
readonly updatedAt: string
/** The ref from which the pull request's changes are coming. */
readonly head: IPullRequestRef
@ -42,35 +40,38 @@ export interface IPullRequest {
readonly author: string
}
export interface IPullRequestStatus {
/**
* Interface describing a record in the
* pullRequestsLastUpdated table.
*/
interface IPullRequestsLastUpdated {
/**
* The database ID. This will be undefined if the status hasn't been inserted
* into the DB.
* The primary key. Corresponds to the
* dbID property for the associated `GitHubRepository`
* instance.
*/
readonly id?: number
/** The ID of the pull request in the database. */
readonly pullRequestId: number
/** The status' state. */
readonly state: APIRefState
/** The number of statuses represented in this combined status. */
readonly totalCount: number
/** The SHA for which this status applies. */
readonly sha: string
readonly repoId: number
/**
* The list of statuses for this specific ref or undefined
* if the database object was created prior to status support
* being added in #3588
* The maximum value of the updated_at field on a
* pull request that we've seen in milliseconds since
* the epoch.
*/
readonly statuses?: ReadonlyArray<IAPIRefStatusItem>
readonly lastUpdated: number
}
/**
* Pull Requests are keyed on the ID of the GitHubRepository
* that they belong to _and_ the PR number.
*
* Index 0 contains the GitHubRepository dbID and index 1
* contains the PR number.
*/
export type PullRequestKey = [number, number]
export class PullRequestDatabase extends BaseDatabase {
public pullRequests!: Dexie.Table<IPullRequest, number>
public pullRequests!: Dexie.Table<IPullRequest, PullRequestKey>
public pullRequestsLastUpdated!: Dexie.Table<IPullRequestsLastUpdated, number>
public constructor(name: string, schemaVersion?: number) {
super(name, schemaVersion)
@ -95,5 +96,165 @@ export class PullRequestDatabase extends BaseDatabase {
// Remove the pullRequestStatus table
this.conditionalVersion(5, { pullRequestStatus: null })
// Delete pullRequestsTable in order to recreate it again
// in version 7 with a new primary key
this.conditionalVersion(6, { pullRequests: null })
// new primary key and a new table dedicated to keeping track
// of the most recently updated PR we've seen.
this.conditionalVersion(7, {
pullRequests: '[base.repoId+number]',
pullRequestsLastUpdated: 'repoId',
})
}
/**
* Removes all the pull requests associated with the given repository
* from the database. Also clears the last updated date for that repository
* if it exists.
*/
public async deleteAllPullRequestsInRepository(repository: GitHubRepository) {
const dbId = forceUnwrap(
"Can't delete PRs for repository, no dbId",
repository.dbID
)
await this.transaction(
'rw',
this.pullRequests,
this.pullRequestsLastUpdated,
async () => {
await this.clearLastUpdated(repository)
await this.pullRequests
.where('[base.repoId+number]')
.between([dbId], [dbId + 1])
.delete()
}
)
}
/**
* Removes all the given pull requests from the database.
*/
public async deletePullRequests(keys: PullRequestKey[]) {
// I believe this to be a bug in Dexie's type declarations.
// It definitely supports passing an array of keys but the
// type thinks that if it's an array it should be an array
// of void which I believe to be a mistake. Therefore we
// type it as any and hand it off to Dexie.
await this.pullRequests.bulkDelete(keys as any)
}
/**
* Inserts the given pull requests, overwriting any existing records
* in the process.
*/
public async putPullRequests(prs: IPullRequest[]) {
await this.pullRequests.bulkPut(prs)
}
/**
* Retrieve all PRs for the given repository.
*
* Note: This method will throw if the GitHubRepository hasn't
* yet been inserted into the database (i.e. the dbID field is null).
*/
public getAllPullRequestsInRepository(repository: GitHubRepository) {
if (repository.dbID === null) {
return fatalError("Can't retrieve PRs for repository, no dbId")
}
return this.pullRequests
.where('[base.repoId+number]')
.between([repository.dbID], [repository.dbID + 1])
.toArray()
}
/**
* Get a single pull request for a particular repository
*/
public getPullRequest(repository: GitHubRepository, prNumber: number) {
if (repository.dbID === null) {
return fatalError("Can't retrieve PRs for repository with a null dbID")
}
return this.pullRequests.get([repository.dbID, prNumber])
}
/**
* Gets a value indicating the most recently updated PR
* that we've seen for a particular repository.
*
* Note:
* This value might differ from max(updated_at) in the pullRequests
* table since the most recently updated PR we saw might have
* been closed and we only store open PRs in the pullRequests
* table.
*/
public async getLastUpdated(repository: GitHubRepository) {
if (repository.dbID === null) {
return fatalError("Can't retrieve PRs for repository with a null dbID")
}
const row = await this.pullRequestsLastUpdated.get(repository.dbID)
return row ? new Date(row.lastUpdated) : null
}
/**
* Clears the stored date for the most recently updated PR seen for
* a given repository.
*/
public async clearLastUpdated(repository: GitHubRepository) {
if (repository.dbID === null) {
throw new Error(
"Can't clear last updated PR for repository with a null dbID"
)
}
await this.pullRequestsLastUpdated.delete(repository.dbID)
}
/**
* Set a value indicating the most recently updated PR
* that we've seen for a particular repository.
*
* Note:
* This value might differ from max(updated_at) in the pullRequests
* table since the most recently updated PR we saw might have
* been closed and we only store open PRs in the pullRequests
* table.
*/
public async setLastUpdated(repository: GitHubRepository, lastUpdated: Date) {
if (repository.dbID === null) {
throw new Error("Can't set last updated for PR with a null dbID")
}
await this.pullRequestsLastUpdated.put({
repoId: repository.dbID,
lastUpdated: lastUpdated.getTime(),
})
}
}
/**
* Create a pull request key from a GitHub repository and a PR number.
*
* This method is mainly a helper function to ensure we don't
* accidentally swap the order of the repository id and the pr number
* if we were to create the key array manually.
*
* @param repository The GitHub repository to which this PR belongs
* @param prNumber The PR number as returned from the GitHub API
*/
export function getPullRequestKey(
repository: GitHubRepository,
prNumber: number
) {
const dbId = forceUnwrap(
`Can't get key for PR, repository not inserted in database.`,
repository.dbID
)
return [dbId, prNumber] as PullRequestKey
}
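A brief sketch of using the compound key against the `pullRequests` table; the PR number 1234 and the open `PullRequestDatabase` instance are placeholders:

// Hypothetical lookups, assuming `db` is an open PullRequestDatabase and
// `repository` has already been inserted (its dbID is non-null).
async function pullRequestLookupExample(
  db: PullRequestDatabase,
  repository: GitHubRepository
) {
  // e.g. [42, 1234] when the repository's dbID is 42.
  const key = getPullRequestKey(repository, 1234)
  // Single PR lookup by its compound primary key.
  const pr = await db.pullRequests.get(key)
  // Every stored PR for the repository, via the range query shown in
  // getAllPullRequestsInRepository above.
  const all = await db.getAllPullRequestsInRepository(repository)
  return { pr, all }
}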

View file

@ -23,13 +23,41 @@ export interface IDatabaseGitHubRepository {
readonly lastPruneDate: number | null
}
/** A record to track the protected branch information for a GitHub repository */
export interface IDatabaseProtectedBranch {
readonly repoId: number
/**
* The branch name associated with the branch protection settings
*
* NOTE: this is NOT a fully-qualified ref (i.e. `refs/heads/master`)
*/
readonly name: string
}
export interface IDatabaseRepository {
readonly id?: number | null
readonly gitHubRepositoryID: number | null
readonly path: string
readonly missing: boolean
/** The last time the stash entries were checked for the repository */
readonly lastStashCheckDate: number | null
/**
* True if the repository is a tutorial repository created as part
* of the onboarding flow. Tutorial repositories trigger a tutorial
* user experience which introduces new users to some core concepts
* of Git and GitHub.
*/
readonly isTutorialRepository?: boolean
}
/**
* Branches are keyed on the ID of the GitHubRepository that they belong to
* and the short name of the branch.
*/
type BranchKey = [number, string]
/** The repositories database. */
export class RepositoriesDatabase extends BaseDatabase {
/** The local repositories table. */
@ -38,6 +66,9 @@ export class RepositoriesDatabase extends BaseDatabase {
/** The GitHub repositories table. */
public gitHubRepositories!: Dexie.Table<IDatabaseGitHubRepository, number>
/** A table containing the names of protected branches per repository. */
public protectedBranches!: Dexie.Table<IDatabaseProtectedBranch, BranchKey>
/** The GitHub repository owners table. */
public owners!: Dexie.Table<IDatabaseOwner, number>
@ -74,6 +105,10 @@ export class RepositoriesDatabase extends BaseDatabase {
this.conditionalVersion(5, {
gitHubRepositories: '++id, name, &[ownerID+name], cloneURL',
})
this.conditionalVersion(6, {
protectedBranches: '[repoId+name], repoId',
})
}
}
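A short sketch of reading and writing the new `protectedBranches` table; the open `RepositoriesDatabase` instance and the repository id 42 are placeholders:

// Hypothetical usage of the protectedBranches table added in version 6.
async function protectedBranchExample(db: RepositoriesDatabase) {
  // The compound [repoId+name] primary key makes this an upsert.
  await db.protectedBranches.put({ repoId: 42, name: 'master' })
  // List every protected branch recorded for repository 42 via the
  // secondary repoId index.
  return db.protectedBranches.where('repoId').equals(42).toArray()
}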

View file

@ -6,8 +6,9 @@ import { assertNever } from '../fatal-error'
export enum ExternalEditor {
Atom = 'Atom',
MacVim = 'MacVim',
VisualStudioCode = 'Visual Studio Code',
VisualStudioCodeInsiders = 'Visual Studio Code (Insiders)',
VSCode = 'Visual Studio Code',
VSCodeInsiders = 'Visual Studio Code (Insiders)',
VSCodium = 'VSCodium',
SublimeText = 'Sublime Text',
BBEdit = 'BBEdit',
PhpStorm = 'PhpStorm',
@ -16,6 +17,7 @@ export enum ExternalEditor {
Brackets = 'Brackets',
WebStorm = 'WebStorm',
Typora = 'Typora',
CodeRunner = 'CodeRunner',
SlickEdit = 'SlickEdit',
}
@ -26,12 +28,17 @@ export function parse(label: string): ExternalEditor | null {
if (label === ExternalEditor.MacVim) {
return ExternalEditor.MacVim
}
if (label === ExternalEditor.VisualStudioCode) {
return ExternalEditor.VisualStudioCode
if (label === ExternalEditor.VSCode) {
return ExternalEditor.VSCode
}
if (label === ExternalEditor.VisualStudioCodeInsiders) {
return ExternalEditor.VisualStudioCodeInsiders
if (label === ExternalEditor.VSCodeInsiders) {
return ExternalEditor.VSCodeInsiders
}
if (label === ExternalEditor.VSCodium) {
return ExternalEditor.VSCodium
}
if (label === ExternalEditor.SublimeText) {
return ExternalEditor.SublimeText
}
@ -56,6 +63,9 @@ export function parse(label: string): ExternalEditor | null {
if (label === ExternalEditor.Typora) {
return ExternalEditor.Typora
}
if (label === ExternalEditor.CodeRunner) {
return ExternalEditor.CodeRunner
}
if (label === ExternalEditor.SlickEdit) {
return ExternalEditor.SlickEdit
}
@ -73,10 +83,12 @@ function getBundleIdentifiers(editor: ExternalEditor): ReadonlyArray<string> {
return ['com.github.atom']
case ExternalEditor.MacVim:
return ['org.vim.MacVim']
case ExternalEditor.VisualStudioCode:
case ExternalEditor.VSCode:
return ['com.microsoft.VSCode']
case ExternalEditor.VisualStudioCodeInsiders:
case ExternalEditor.VSCodeInsiders:
return ['com.microsoft.VSCodeInsiders']
case ExternalEditor.VSCodium:
return ['com.visualstudio.code.oss']
case ExternalEditor.SublimeText:
return ['com.sublimetext.3']
case ExternalEditor.BBEdit:
@ -93,6 +105,8 @@ function getBundleIdentifiers(editor: ExternalEditor): ReadonlyArray<string> {
return ['com.jetbrains.WebStorm']
case ExternalEditor.Typora:
return ['abnerworks.Typora']
case ExternalEditor.CodeRunner:
return ['com.krill.CodeRunner']
case ExternalEditor.SlickEdit:
return [
'com.slickedit.SlickEditPro2018',
@ -112,8 +126,17 @@ function getExecutableShim(
switch (editor) {
case ExternalEditor.Atom:
return Path.join(installPath, 'Contents', 'Resources', 'app', 'atom.sh')
case ExternalEditor.VisualStudioCode:
case ExternalEditor.VisualStudioCodeInsiders:
case ExternalEditor.VSCode:
case ExternalEditor.VSCodeInsiders:
return Path.join(
installPath,
'Contents',
'Resources',
'app',
'bin',
'code'
)
case ExternalEditor.VSCodium:
return Path.join(
installPath,
'Contents',
@ -140,6 +163,8 @@ function getExecutableShim(
return Path.join(installPath, 'Contents', 'MacOS', 'WebStorm')
case ExternalEditor.Typora:
return Path.join(installPath, 'Contents', 'MacOS', 'Typora')
case ExternalEditor.CodeRunner:
return Path.join(installPath, 'Contents', 'MacOS', 'CodeRunner')
case ExternalEditor.SlickEdit:
return Path.join(installPath, 'Contents', 'MacOS', 'vs')
default:
@ -181,6 +206,7 @@ export async function getAvailableEditors(): Promise<
macVimPath,
codePath,
codeInsidersPath,
codiumPath,
sublimePath,
bbeditPath,
phpStormPath,
@ -189,12 +215,14 @@ export async function getAvailableEditors(): Promise<
bracketsPath,
webStormPath,
typoraPath,
codeRunnerPath,
slickeditPath,
] = await Promise.all([
findApplication(ExternalEditor.Atom),
findApplication(ExternalEditor.MacVim),
findApplication(ExternalEditor.VisualStudioCode),
findApplication(ExternalEditor.VisualStudioCodeInsiders),
findApplication(ExternalEditor.VSCode),
findApplication(ExternalEditor.VSCodeInsiders),
findApplication(ExternalEditor.VSCodium),
findApplication(ExternalEditor.SublimeText),
findApplication(ExternalEditor.BBEdit),
findApplication(ExternalEditor.PhpStorm),
@ -203,6 +231,7 @@ export async function getAvailableEditors(): Promise<
findApplication(ExternalEditor.Brackets),
findApplication(ExternalEditor.WebStorm),
findApplication(ExternalEditor.Typora),
findApplication(ExternalEditor.CodeRunner),
findApplication(ExternalEditor.SlickEdit),
])
@ -215,16 +244,20 @@ export async function getAvailableEditors(): Promise<
}
if (codePath) {
results.push({ editor: ExternalEditor.VisualStudioCode, path: codePath })
results.push({ editor: ExternalEditor.VSCode, path: codePath })
}
if (codeInsidersPath) {
results.push({
editor: ExternalEditor.VisualStudioCodeInsiders,
editor: ExternalEditor.VSCodeInsiders,
path: codeInsidersPath,
})
}
if (codiumPath) {
results.push({ editor: ExternalEditor.VSCodium, path: codiumPath })
}
if (sublimePath) {
results.push({ editor: ExternalEditor.SublimeText, path: sublimePath })
}
@ -257,6 +290,10 @@ export async function getAvailableEditors(): Promise<
results.push({ editor: ExternalEditor.Typora, path: typoraPath })
}
if (codeRunnerPath) {
results.push({ editor: ExternalEditor.CodeRunner, path: codeRunnerPath })
}
if (slickeditPath) {
results.push({ editor: ExternalEditor.SlickEdit, path: slickeditPath })
}

View file

@ -5,8 +5,9 @@ import { assertNever } from '../fatal-error'
export enum ExternalEditor {
Atom = 'Atom',
VisualStudioCode = 'Visual Studio Code',
VisualStudioCodeInsiders = 'Visual Studio Code (Insiders)',
VSCode = 'Visual Studio Code',
VSCodeInsiders = 'Visual Studio Code (Insiders)',
VSCodium = 'VSCodium',
SublimeText = 'Sublime Text',
Typora = 'Typora',
SlickEdit = 'SlickEdit',
@ -17,12 +18,16 @@ export function parse(label: string): ExternalEditor | null {
return ExternalEditor.Atom
}
if (label === ExternalEditor.VisualStudioCode) {
return ExternalEditor.VisualStudioCode
if (label === ExternalEditor.VSCode) {
return ExternalEditor.VSCode
}
if (label === ExternalEditor.VisualStudioCodeInsiders) {
return ExternalEditor.VisualStudioCode
if (label === ExternalEditor.VSCodeInsiders) {
return ExternalEditor.VSCode
}
if (label === ExternalEditor.VSCodium) {
return ExternalEditor.VSCodium
}
if (label === ExternalEditor.SublimeText) {
@ -48,10 +53,12 @@ async function getEditorPath(editor: ExternalEditor): Promise<string | null> {
switch (editor) {
case ExternalEditor.Atom:
return getPathIfAvailable('/usr/bin/atom')
case ExternalEditor.VisualStudioCode:
case ExternalEditor.VSCode:
return getPathIfAvailable('/usr/bin/code')
case ExternalEditor.VisualStudioCodeInsiders:
case ExternalEditor.VSCodeInsiders:
return getPathIfAvailable('/usr/bin/code-insiders')
case ExternalEditor.VSCodium:
return getPathIfAvailable('/usr/bin/codium')
case ExternalEditor.SublimeText:
return getPathIfAvailable('/usr/bin/subl')
case ExternalEditor.Typora:
@ -84,13 +91,15 @@ export async function getAvailableEditors(): Promise<
atomPath,
codePath,
codeInsidersPath,
codiumPath,
sublimePath,
typoraPath,
slickeditPath,
] = await Promise.all([
getEditorPath(ExternalEditor.Atom),
getEditorPath(ExternalEditor.VisualStudioCode),
getEditorPath(ExternalEditor.VisualStudioCodeInsiders),
getEditorPath(ExternalEditor.VSCode),
getEditorPath(ExternalEditor.VSCodeInsiders),
getEditorPath(ExternalEditor.VSCodium),
getEditorPath(ExternalEditor.SublimeText),
getEditorPath(ExternalEditor.Typora),
getEditorPath(ExternalEditor.SlickEdit),
@ -101,14 +110,15 @@ export async function getAvailableEditors(): Promise<
}
if (codePath) {
results.push({ editor: ExternalEditor.VisualStudioCode, path: codePath })
results.push({ editor: ExternalEditor.VSCode, path: codePath })
}
if (codeInsidersPath) {
results.push({
editor: ExternalEditor.VisualStudioCode,
path: codeInsidersPath,
})
results.push({ editor: ExternalEditor.VSCode, path: codeInsidersPath })
}
if (codiumPath) {
results.push({ editor: ExternalEditor.VSCodium, path: codiumPath })
}
if (sublimePath) {

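The renamed enum members (VisualStudioCode becomes VSCode, VisualStudioCodeInsiders becomes VSCodeInsiders) keep their original string values, so any label previously produced from these enums still parses to the renamed members. A minimal sketch of that round trip; the import path is an assumption based on this diff's layout:

// Sketch only: the import path is assumed, not confirmed by this diff.
import { ExternalEditor, parse } from './app/src/lib/editors/linux'

// The enum *value* is unchanged, so a label produced before the rename
// still resolves to the renamed member.
const storedLabel = 'Visual Studio Code'
const editor = parse(storedLabel)

console.log(editor === ExternalEditor.VSCode) // true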
View file

@ -49,7 +49,7 @@ export async function getAvailableEditors(): Promise<
* be found (i.e. it has been removed).
*/
export async function findEditorOrDefault(
name?: string
name: string | null
): Promise<IFoundEditor<ExternalEditor> | null> {
const editors = await getAvailableEditors()
if (editors.length === 0) {

View file

@ -14,8 +14,11 @@ import { assertNever } from '../fatal-error'
export enum ExternalEditor {
Atom = 'Atom',
VisualStudioCode = 'Visual Studio Code',
VisualStudioCodeInsiders = 'Visual Studio Code (Insiders)',
AtomBeta = 'Atom Beta',
AtomNightly = 'Atom Nightly',
VSCode = 'Visual Studio Code',
VSCodeInsiders = 'Visual Studio Code (Insiders)',
VSCodium = 'Visual Studio Codium',
SublimeText = 'Sublime Text',
CFBuilder = 'ColdFusion Builder',
Typora = 'Typora',
@ -27,11 +30,20 @@ export function parse(label: string): ExternalEditor | null {
if (label === ExternalEditor.Atom) {
return ExternalEditor.Atom
}
if (label === ExternalEditor.VisualStudioCode) {
return ExternalEditor.VisualStudioCode
if (label === ExternalEditor.AtomBeta) {
return ExternalEditor.AtomBeta
}
if (label === ExternalEditor.VisualStudioCodeInsiders) {
return ExternalEditor.VisualStudioCodeInsiders
if (label === ExternalEditor.AtomNightly) {
return ExternalEditor.AtomNightly
}
if (label === ExternalEditor.VSCode) {
return ExternalEditor.VSCode
}
if (label === ExternalEditor.VSCodeInsiders) {
return ExternalEditor.VSCodeInsiders
}
if (label === ExternalEditor.VSCodium) {
return ExternalEditor.VSCodium
}
if (label === ExternalEditor.SublimeText) {
return ExternalEditor.SublimeText
@ -69,7 +81,23 @@ function getRegistryKeys(
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\atom',
},
]
case ExternalEditor.VisualStudioCode:
case ExternalEditor.AtomBeta:
return [
{
key: HKEY.HKEY_CURRENT_USER,
subKey:
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\atom-beta',
},
]
case ExternalEditor.AtomNightly:
return [
{
key: HKEY.HKEY_CURRENT_USER,
subKey:
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\atom-nightly',
},
]
case ExternalEditor.VSCode:
return [
// 64-bit version of VSCode (user) - provided by default in 64-bit Windows
{
@ -96,7 +124,7 @@ function getRegistryKeys(
'SOFTWARE\\WOW6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{F8A2A208-72B3-4D61-95FC-8A65D340689B}_is1',
},
]
case ExternalEditor.VisualStudioCodeInsiders:
case ExternalEditor.VSCodeInsiders:
return [
// 64-bit version of VSCode (user) - provided by default in 64-bit Windows
{
@ -123,6 +151,33 @@ function getRegistryKeys(
'SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{C26E74D1-022E-4238-8B9D-1E7564A36CC9}_is1',
},
]
case ExternalEditor.VSCodium:
return [
// 64-bit version of VSCodium (user)
{
key: HKEY.HKEY_CURRENT_USER,
subKey:
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{2E1F05D1-C245-4562-81EE-28188DB6FD17}_is1',
},
// 32-bit version of VSCodium (user)
{
key: HKEY.HKEY_CURRENT_USER,
subKey:
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{C6065F05-9603-4FC4-8101-B9781A25D88E}}_is1',
},
// 64-bit version of VSCodium (system)
{
key: HKEY.HKEY_LOCAL_MACHINE,
subKey:
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{D77B7E06-80BA-4137-BCF4-654B95CCEBC5}_is1',
},
// 32-bit version of VSCodium (system)
{
key: HKEY.HKEY_LOCAL_MACHINE,
subKey:
'SOFTWARE\\WOW6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{E34003BB-9E10-4501-8C11-BE3FAA83F23F}_is1',
},
]
case ExternalEditor.SublimeText:
return [
{
@ -258,10 +313,16 @@ function getExecutableShim(
switch (editor) {
case ExternalEditor.Atom:
return Path.join(installLocation, 'bin', 'atom.cmd') // remember, CMD must 'useShell'
case ExternalEditor.VisualStudioCode:
case ExternalEditor.AtomBeta:
return Path.join(installLocation, 'bin', 'atom-beta.cmd') // remember, CMD must 'useShell'
case ExternalEditor.AtomNightly:
return Path.join(installLocation, 'bin', 'atom-nightly.cmd') // remember, CMD must 'useShell'
case ExternalEditor.VSCode:
return Path.join(installLocation, 'bin', 'code.cmd') // remember, CMD must 'useShell'
case ExternalEditor.VisualStudioCodeInsiders:
case ExternalEditor.VSCodeInsiders:
return Path.join(installLocation, 'bin', 'code-insiders.cmd') // remember, CMD must 'useShell'
case ExternalEditor.VSCodium:
return Path.join(installLocation, 'bin', 'codium.cmd') // remember, CMD must 'useShell'
case ExternalEditor.SublimeText:
return Path.join(installLocation, 'subl.exe')
case ExternalEditor.CFBuilder:
@ -292,16 +353,22 @@ function isExpectedInstallation(
switch (editor) {
case ExternalEditor.Atom:
return displayName === 'Atom' && publisher === 'GitHub Inc.'
case ExternalEditor.VisualStudioCode:
case ExternalEditor.AtomBeta:
return displayName === 'Atom Beta' && publisher === 'GitHub Inc.'
case ExternalEditor.AtomNightly:
return displayName === 'Atom Nightly' && publisher === 'GitHub Inc.'
case ExternalEditor.VSCode:
return (
displayName.startsWith('Microsoft Visual Studio Code') &&
publisher === 'Microsoft Corporation'
)
case ExternalEditor.VisualStudioCodeInsiders:
case ExternalEditor.VSCodeInsiders:
return (
displayName.startsWith('Microsoft Visual Studio Code Insiders') &&
publisher === 'Microsoft Corporation'
)
case ExternalEditor.VSCodium:
return displayName === 'Visual Source Codium' && publisher === 'VSCodium'
case ExternalEditor.SublimeText:
return (
displayName === 'Sublime Text' && publisher === 'Sublime HQ Pty Ltd'
@ -345,7 +412,11 @@ function extractApplicationInformation(
editor: ExternalEditor,
keys: ReadonlyArray<RegistryValue>
): { displayName: string; publisher: string; installLocation: string } {
if (editor === ExternalEditor.Atom) {
if (
editor === ExternalEditor.Atom ||
editor === ExternalEditor.AtomBeta ||
editor === ExternalEditor.AtomNightly
) {
const displayName = getKeyOrEmpty(keys, 'DisplayName')
const publisher = getKeyOrEmpty(keys, 'Publisher')
const installLocation = getKeyOrEmpty(keys, 'InstallLocation')
@ -353,8 +424,8 @@ function extractApplicationInformation(
}
if (
editor === ExternalEditor.VisualStudioCode ||
editor === ExternalEditor.VisualStudioCodeInsiders
editor === ExternalEditor.VSCode ||
editor === ExternalEditor.VSCodeInsiders
) {
const displayName = getKeyOrEmpty(keys, 'DisplayName')
const publisher = getKeyOrEmpty(keys, 'Publisher')
@ -362,6 +433,13 @@ function extractApplicationInformation(
return { displayName, publisher, installLocation }
}
if (editor === ExternalEditor.VSCodium) {
const displayName = getKeyOrEmpty(keys, 'DisplayName')
const publisher = getKeyOrEmpty(keys, 'Publisher')
const installLocation = getKeyOrEmpty(keys, 'InstallLocation')
return { displayName, publisher, installLocation }
}
if (editor === ExternalEditor.SublimeText) {
let displayName = ''
let publisher = ''
@ -496,16 +574,22 @@ export async function getAvailableEditors(): Promise<
const [
atomPath,
atomBetaPath,
atomNightlyPath,
codePath,
codeInsidersPath,
codiumPath,
sublimePath,
cfBuilderPath,
typoraPath,
slickeditPath,
] = await Promise.all([
findApplication(ExternalEditor.Atom),
findApplication(ExternalEditor.VisualStudioCode),
findApplication(ExternalEditor.VisualStudioCodeInsiders),
findApplication(ExternalEditor.AtomBeta),
findApplication(ExternalEditor.AtomNightly),
findApplication(ExternalEditor.VSCode),
findApplication(ExternalEditor.VSCodeInsiders),
findApplication(ExternalEditor.VSCodium),
findApplication(ExternalEditor.SublimeText),
findApplication(ExternalEditor.CFBuilder),
findApplication(ExternalEditor.Typora),
@ -520,9 +604,25 @@ export async function getAvailableEditors(): Promise<
})
}
if (atomBetaPath) {
results.push({
editor: ExternalEditor.AtomBeta,
path: atomBetaPath,
usesShell: true,
})
}
if (atomNightlyPath) {
results.push({
editor: ExternalEditor.AtomNightly,
path: atomNightlyPath,
usesShell: true,
})
}
if (codePath) {
results.push({
editor: ExternalEditor.VisualStudioCode,
editor: ExternalEditor.VSCode,
path: codePath,
usesShell: true,
})
@ -530,12 +630,20 @@ export async function getAvailableEditors(): Promise<
if (codeInsidersPath) {
results.push({
editor: ExternalEditor.VisualStudioCodeInsiders,
editor: ExternalEditor.VSCodeInsiders,
path: codeInsidersPath,
usesShell: true,
})
}
if (codiumPath) {
results.push({
editor: ExternalEditor.VSCodium,
path: codiumPath,
usesShell: true,
})
}
if (sublimePath) {
results.push({
editor: ExternalEditor.SublimeText,

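Detection of the newly added editors probes a list of candidate uninstall keys across the per-user and per-machine hives and accepts the first entry whose DisplayName/Publisher pair looks right. A rough sketch of that probe loop, assuming the enumerateValues/HKEY API from registry-js that this module already depends on; the VSCodium GUIDs are copied from the hunk above:

// Sketch only: registry-js usage is assumed; values have the RegistryValue
// shape (name/data) used elsewhere in this file.
import { enumerateValues, HKEY } from 'registry-js'

const candidates = [
  {
    key: HKEY.HKEY_CURRENT_USER,
    subKey:
      'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{2E1F05D1-C245-4562-81EE-28188DB6FD17}_is1',
  },
  {
    key: HKEY.HKEY_LOCAL_MACHINE,
    subKey:
      'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{D77B7E06-80BA-4137-BCF4-654B95CCEBC5}_is1',
  },
]

function findVSCodiumInstallLocation(): string | null {
  for (const { key, subKey } of candidates) {
    const values = enumerateValues(key, subKey)
    if (values.length === 0) {
      continue // not installed under this hive/key
    }
    const get = (name: string) => {
      const value = values.find(v => v.name === name)
      return value ? String(value.data) : ''
    }
    if (get('Publisher') === 'VSCodium') {
      return get('InstallLocation')
    }
  }
  return null
}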
View file

@ -1,4 +1,7 @@
import { IAPIEmail } from './api'
import * as URL from 'url'
import { IAPIEmail, getDotComAPIEndpoint } from './api'
import { Account } from '../models/account'
/**
* Lookup a suitable email address to display in the application, based on the
@ -13,9 +16,9 @@ import { IAPIEmail } from './api'
*
* @param emails array of email addresses associated with an account
*/
export function lookupPreferredEmail(
emails: ReadonlyArray<IAPIEmail>
): IAPIEmail | null {
export function lookupPreferredEmail(account: Account): IAPIEmail | null {
const emails = account.emails
if (emails.length === 0) {
return null
}
@ -25,9 +28,12 @@ export function lookupPreferredEmail(
return primary
}
const stealthSuffix = `@${getStealthEmailHostForEndpoint(account.endpoint)}`
const noReply = emails.find(e =>
e.email.toLowerCase().endsWith('@users.noreply.github.com')
e.email.toLowerCase().endsWith(stealthSuffix)
)
if (noReply) {
return noReply
}
@ -40,7 +46,7 @@ export function lookupPreferredEmail(
*/
function isEmailPublic(email: IAPIEmail): boolean {
// If an email doesn't have a visibility setting it means it's coming from an
// older Enterprise server which doesn't have the concept of visibility.
// older Enterprise Server which doesn't have the concept of visibility.
return email.visibility === 'public' || !email.visibility
}
@ -58,3 +64,15 @@ export function getDefaultEmail(emails: ReadonlyArray<IAPIEmail>): string {
return emails[0].email || ''
}
/**
* Returns the stealth email host name for a given endpoint. The stealth
* email host is hardcoded to the subdomain users.noreply under the
* endpoint host.
*/
function getStealthEmailHostForEndpoint(endpoint: string) {
const url = URL.parse(endpoint)
return getDotComAPIEndpoint() !== endpoint
? `users.noreply.${url.hostname}`
: 'users.noreply.github.com'
}
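lookupPreferredEmail now derives the noreply suffix from the account's endpoint instead of hardcoding users.noreply.github.com, so GitHub Enterprise Server accounts match their own stealth addresses. A small worked example of the expected suffixes; the Enterprise Server hostname below is made up:

// Sketch only: mirrors getStealthEmailHostForEndpoint above.
import * as URL from 'url'

function stealthHost(endpoint: string, dotComEndpoint = 'https://api.github.com'): string {
  const { hostname } = URL.parse(endpoint)
  return endpoint === dotComEndpoint
    ? 'users.noreply.github.com'
    : `users.noreply.${hostname}`
}

console.log(stealthHost('https://api.github.com'))
// -> users.noreply.github.com
console.log(stealthHost('https://github.example.com/api/v3'))
// -> users.noreply.github.example.com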

View file

@ -1,5 +1,5 @@
/**
* The oldest officially supported version of GitHub Enterprise.
* The oldest officially supported version of GitHub Enterprise Server.
* This information is used in user-facing text and shouldn't be
* considered a hard limit, i.e. older versions of GitHub Enterprise
* might (and probably do) work just fine but this should be a fairly

View file

@ -37,18 +37,13 @@ export function enableRecurseSubmodulesFlag(): boolean {
return enableBetaFeatures()
}
/** Should the app set protocol.version=2 for any fetch/push/pull/clone operation? */
export function enableGitProtocolVersionTwo(): boolean {
return true
}
export function enableReadmeOverwriteWarning(): boolean {
return enableBetaFeatures()
}
/** Shoult the app automatically prune branches that are no longer actively being used */
/** Should the app automatically prune branches that are no longer actively being used */
export function enableBranchPruning(): boolean {
return enableBetaFeatures()
return true
}
/**
@ -60,11 +55,6 @@ export function enableBranchPruning(): boolean {
* just yet.
*/
export function enableNoChangesCreatePRBlankslateAction(): boolean {
return enableBetaFeatures()
}
/** Should the app detect and handle rebase conflicts when `pull.rebase` is set? */
export function enablePullWithRebase(): boolean {
return true
}
@ -73,10 +63,53 @@ export function enablePullWithRebase(): boolean {
* grouping and filtering (GitHub) repositories by owner/organization.
*/
export function enableGroupRepositoriesByOwner(): boolean {
return enableBetaFeatures()
return true
}
/** Should the app show the "rebase current branch" dialog? */
export function enableRebaseDialog(): boolean {
return enableDevelopmentFeatures()
return true
}
/** Should the app show the "stash changes" dialog? */
export function enableStashing(): boolean {
return true
}
/**
* Should the application query for branch protection information and store this
* to help the maintainers understand how broadly branch protections are
* encountered?
*/
export function enableBranchProtectionChecks(): boolean {
return true
}
/** Should the app detect Windows Subsystem for Linux as a valid shell? */
export function enableWSLDetection(): boolean {
return enableBetaFeatures()
}
/**
* Should the application warn the user when they are about to commit to a
* protected branch, and encourage them into a flow to move their changes to
* a new branch?
*
* As this builds upon existing branch protection features in the codebase, this
* flag is linked to `enableBranchProtectionChecks()`.
*/
export function enableBranchProtectionWarningFlow(): boolean {
return enableBranchProtectionChecks() && enableDevelopmentFeatures()
}
export function enableHideWhitespaceInDiffOption(): boolean {
return true
}
/**
* Should we enable the onboarding tutorial. This includes the initial
* configuration of the tutorial repo as well as the tutorial itself.
*/
export function enableTutorial(): boolean {
return true
}
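Because the flags are functions they can be composed, as enableBranchProtectionWarningFlow does by requiring both the branch protection checks and the development-features switch. A hedged sketch of how a caller might gate behaviour on such a flag; the handler names are made up:

// Sketch only: onCommitToProtectedBranch and showMoveChangesDialog are
// hypothetical names used to illustrate gating on a feature flag.
import { enableBranchProtectionWarningFlow } from './app/src/lib/feature-flag'

function onCommitToProtectedBranch(
  showMoveChangesDialog: () => void,
  commit: () => void
) {
  if (enableBranchProtectionWarningFlow()) {
    // steer the user towards moving their changes to a new branch
    showMoveChangesDialog()
  } else {
    commit()
  }
}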

View file

@ -33,7 +33,7 @@ async function canAccessRepositoryUsingAPI(
* @param urlOrRepositoryAlias - the URL or repository alias whose account
* should be found
* @param accounts - the list of active GitHub and GitHub Enterprise
* accounts
* Server accounts
*/
export async function findAccountForRemoteURL(
urlOrRepositoryAlias: string,
@ -74,11 +74,11 @@ export async function findAccountForRemoteURL(
// This chunk of code is designed to sort the user's accounts in this order:
// - authenticated GitHub account
// - GitHub Enterprise accounts
// - GitHub Enterprise Server accounts
// - unauthenticated GitHub account (access public repositories)
//
// As this needs to be done efficiently, we consider endpoints not matching
// `getDotComAPIEndpoint()` to be GitHub Enterprise accounts, and accounts
// `getDotComAPIEndpoint()` to be GitHub Enterprise Server accounts, and accounts
// without a token to be unauthenticated.
const sortedAccounts = Array.from(allAccounts).sort((a1, a2) => {
if (a1.endpoint === getDotComAPIEndpoint()) {

View file

@ -3,9 +3,20 @@ type MergeOrPullConflictsErrorContext = {
readonly kind: 'merge' | 'pull'
/** The branch being merged into the current branch, "theirs" in Git terminology */
readonly theirBranch: string
/** The branch associated with the current tip of the repository, "ours" in Git terminology */
readonly currentBranch: string
}
type CheckoutBranchErrorContext = {
/** The Git operation that triggered the error */
readonly kind: 'checkout'
/** The branch associated with the current tip of the repository, "ours" in Git terminology */
readonly branchToCheckout: string
}
/** A custom shape of data for actions to provide to help with error handling */
export type GitErrorContext = MergeOrPullConflictsErrorContext
export type GitErrorContext =
| MergeOrPullConflictsErrorContext
| CheckoutBranchErrorContext
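GitErrorContext is now a discriminated union keyed on kind, so error handlers can narrow to the right shape without casts. A minimal sketch of such a handler; the import path and messages are illustrative:

// Sketch only: narrows on the `kind` discriminant.
import { GitErrorContext } from './app/src/lib/git-error-context'

function describeContext(context: GitErrorContext): string {
  switch (context.kind) {
    case 'merge':
    case 'pull':
      // merge/pull conflicts know both branches involved
      return `conflicts while bringing ${context.theirBranch} into ${context.currentBranch}`
    case 'checkout':
      // checkout failures only know the branch being checked out
      return `unable to check out ${context.branchToCheckout}`
    default:
      // exhaustiveness guard for future context kinds
      return 'unexpected error context'
  }
}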

View file

@ -1,10 +1,11 @@
import { GitError as DugiteError } from 'dugite'
import { git } from './core'
import {
WorkingDirectoryFileChange,
AppFileStatusKind,
} from '../../models/status'
import { DiffType } from '../../models/diff'
import { Repository } from '../../models/repository'
import { Repository, WorkingTree } from '../../models/repository'
import { getWorkingDirectoryDiff } from './diff'
import { formatPatch } from '../patch-formatter'
@ -67,3 +68,35 @@ export async function applyPatchToIndex(
return Promise.resolve()
}
/**
* Test a patch to see if it will apply cleanly.
*
* @param workTree work tree (which should be checked out to a specific commit)
* @param patch a Git patch (or patch series) to try applying
* @returns whether the patch applies cleanly
*
* See `formatPatch` to generate a patch series from existing Git commits
*/
export async function checkPatch(
workTree: WorkingTree,
patch: string
): Promise<boolean> {
const result = await git(
['apply', '--check', '-'],
workTree.path,
'checkPatch',
{
stdin: patch,
stdinEncoding: 'utf8',
expectedErrors: new Set<DugiteError>([DugiteError.PatchDoesNotApply]),
}
)
if (result.gitError === DugiteError.PatchDoesNotApply) {
// other errors will be thrown if encountered, so this is fine for now
return false
}
return true
}
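checkPatch is meant to be combined with formatPatch (added later in this diff) and a temporary worktree: generate the patch series for a commit range, then dry-run `git apply --check` against a worktree pinned to the target commit. A rough sketch of that flow; import paths follow this diff's layout and error handling is omitted:

// Sketch only: assumes these helpers are re-exported from the git module index.
import { Repository } from './app/src/models/repository'
import {
  formatPatch,
  checkPatch,
  createTemporaryWorkTree,
  destroyWorkTree,
} from './app/src/lib/git'

async function wouldApplyCleanly(
  repository: Repository,
  base: string, // commit the series starts from
  head: string, // tip of the branch being replayed
  target: string // commit the series should apply on top of
): Promise<boolean> {
  const patch = await formatPatch(repository, base, head)
  const workTree = await createTemporaryWorkTree(repository, target)
  try {
    return await checkPatch(workTree, patch)
  } finally {
    await destroyWorkTree(repository, workTree)
  }
}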

View file

@ -6,18 +6,6 @@ import { IGitAccount } from '../../models/git-account'
import { envForAuthentication } from './authentication'
import { formatAsLocalRef } from './refs'
export interface IMergedBranch {
/**
* The canonical reference to the merged branch
*/
readonly canonicalRef: string
/**
* The full-length Object ID (SHA) in HEX (32 chars)
*/
readonly sha: string
}
/**
* Create a new branch from the given start point.
*
@ -30,19 +18,17 @@ export interface IMergedBranch {
export async function createBranch(
repository: Repository,
name: string,
startPoint?: string
startPoint: string | null
): Promise<Branch | null> {
const args = startPoint ? ['branch', name, startPoint] : ['branch', name]
const args =
startPoint !== null ? ['branch', name, startPoint] : ['branch', name]
try {
await git(args, repository.path, 'createBranch')
const branches = await getBranches(repository, `refs/heads/${name}`)
if (branches.length > 0) {
return branches[0]
}
} catch (err) {
log.error('createBranch failed', err)
await git(args, repository.path, 'createBranch')
const branches = await getBranches(repository, `refs/heads/${name}`)
if (branches.length > 0) {
return branches[0]
}
return null
}
@ -183,11 +169,12 @@ export async function getBranchesPointedAt(
*
* @param repository The repository in which to search
* @param branchName The branch to be used as the base branch
* @returns map of branch canonical refs paired to its sha
*/
export async function getMergedBranches(
repository: Repository,
branchName: string
): Promise<ReadonlyArray<IMergedBranch>> {
): Promise<Map<string, string>> {
const canonicalBranchRef = formatAsLocalRef(branchName)
const args = [
@ -202,7 +189,7 @@ export async function getMergedBranches(
// Remove the trailing newline
lines.splice(-1, 1)
const mergedBranches = new Array<IMergedBranch>()
const mergedBranches = new Map<string, string>()
for (const line of lines) {
const [sha, canonicalRef] = line.split('\0')
@ -217,7 +204,7 @@ export async function getMergedBranches(
continue
}
mergedBranches.push({ sha, canonicalRef })
mergedBranches.set(canonicalRef, sha)
}
return mergedBranches

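Switching the return type to a Map keyed on the canonical ref makes membership checks a constant-time lookup for callers. A small usage sketch; the import path is an assumption:

// Sketch only: checks whether a local branch is already merged into the
// default branch, using the Map shape returned above.
import { Repository } from './app/src/models/repository'
import { getMergedBranches } from './app/src/lib/git/branch'

async function isBranchMerged(
  repository: Repository,
  defaultBranchName: string,
  branchName: string
): Promise<boolean> {
  const merged = await getMergedBranches(repository, defaultBranchName)
  // keys are canonical refs such as 'refs/heads/some-feature'
  return merged.has(`refs/heads/${branchName}`)
}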
View file

@ -61,7 +61,7 @@ export async function checkoutBranch(
account: IGitAccount | null,
branch: Branch,
progressCallback?: ProgressCallback
): Promise<void> {
): Promise<true> {
let opts: IGitExecutionOptions = {
env: envForAuthentication(account),
expectedErrors: AuthenticationErrors,
@ -97,6 +97,9 @@ export async function checkoutBranch(
)
await git(args, repository.path, 'checkoutBranch', opts)
// we return `true` here so `GitStore.performFailableGitOperation`
// will return _something_ differentiable from `undefined` if this succeeds
return true
}
/** Check out the paths at HEAD. */

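The Promise<true> return type is deliberate: the store wrapper that runs failable Git operations resolves to undefined on failure, so a truthy result is the only way a caller can tell the two outcomes apart. A hedged sketch of that pattern; the wrapper below is a stand-in for GitStore.performFailableGitOperation, whose exact signature is assumed:

// Sketch only: illustrates why `true` is distinguishable from the `undefined`
// produced when the wrapped operation fails.
async function performFailableOperation<T>(
  op: () => Promise<T>
): Promise<T | undefined> {
  try {
    return await op()
  } catch (err) {
    // the error is reported elsewhere; callers only see `undefined`
    return undefined
  }
}

async function example(checkout: () => Promise<true>) {
  const result = await performFailableOperation(checkout)
  if (result === undefined) {
    return // checkout failed; a Promise<void> operation could never signal this
  }
  // result is `true`, so it is safe to refresh branch state here
}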
View file

@ -7,7 +7,6 @@ import {
import { assertNever } from '../fatal-error'
import { getDotComAPIEndpoint } from '../api'
import { enableGitProtocolVersionTwo } from '../feature-flag'
import { IGitAccount } from '../../models/git-account'
@ -149,16 +148,18 @@ export async function git(
}
// The caller should either handle this error, or expect that exit code.
const errorMessage = []
const errorMessage = new Array<string>()
errorMessage.push(
`\`git ${args.join(' ')}\` exited with an unexpected code: ${exitCode}.`
)
if (result.stdout) {
errorMessage.push('stdout:')
errorMessage.push(result.stdout)
}
if (result.stderr) {
errorMessage.push('stderr:')
errorMessage.push(result.stderr)
}
@ -180,9 +181,16 @@ function getDescriptionForError(error: DugiteError): string {
case DugiteError.SSHAuthenticationFailed:
case DugiteError.SSHPermissionDenied:
case DugiteError.HTTPSAuthenticationFailed:
return `Authentication failed. You may not have permission to access the repository or the repository may have been archived. Open ${
__DARWIN__ ? 'preferences' : 'options'
} and verify that you're signed in with an account that has permission to access this repository.`
const menuHint = __DARWIN__
? 'GitHub Desktop > Preferences.'
: 'File > Options.'
return `Authentication failed. Some common reasons include:
- You are not logged in to your account: see ${menuHint}
- You may need to log out and log back in to refresh your token.
- You do not have permission to access this repository.
- The repository is archived on GitHub. Check the repository settings to confirm you are still permitted to push commits.
- If you use SSH authentication, check that your key is added to the ssh-agent and associated with your account.`
case DugiteError.RemoteDisconnection:
return 'The remote disconnected. Check your Internet connection and try again.'
case DugiteError.HostDown:
@ -267,7 +275,7 @@ function getDescriptionForError(error: DugiteError): string {
case DugiteError.NoExistingRemoteBranch:
return 'The remote branch does not exist.'
case DugiteError.LocalChangesOverwritten:
return 'Some of your changes would be overwritten.'
return 'Unable to switch branches as there are working directory changes which would be overwritten. Please commit or stash your changes.'
case DugiteError.UnresolvedConflicts:
return 'There are unresolved conflicts in the working directory.'
default:
@ -306,10 +314,6 @@ export async function gitNetworkArguments(
'credential.helper=',
]
if (!enableGitProtocolVersionTwo()) {
return baseArgs
}
if (account === null) {
return baseArgs
}

View file

@ -98,11 +98,13 @@ const imageFileExtensions = new Set([
export async function getCommitDiff(
repository: Repository,
file: FileChange,
commitish: string
commitish: string,
hideWhitespaceInDiff: boolean = false
): Promise<IDiff> {
const args = [
'log',
commitish,
...(hideWhitespaceInDiff ? ['-w'] : []),
'-m',
'-1',
'--first-parent',

View file

@ -0,0 +1,25 @@
import { revRange } from './rev-list'
import { Repository } from '../../models/repository'
import { spawnAndComplete } from './spawn'
/**
* Generate a patch representing the changes associated with a range of commits
*
* @param repository where to generate the patch from
* @param base starting commit in range
* @param head ending commit in range
* @returns the generated patch
*/
export async function formatPatch(
repository: Repository,
base: string,
head: string
): Promise<string> {
const range = revRange(base, head)
const { output } = await spawnAndComplete(
['format-patch', '--unified=1', '--minimal', '--stdout', range],
repository.path,
'formatPatch'
)
return output.toString('utf8')
}

View file

@ -33,3 +33,5 @@ export * from './submodule'
export * from './interpret-trailers'
export * from './gitignore'
export * from './rebase'
export * from './format-patch'
export * from './worktree'

View file

@ -179,11 +179,23 @@ export async function getChangedFiles(
]
const result = await git(args, repository.path, 'getChangedFiles')
const out = result.stdout
const lines = out.split('\0')
return parseChangedFiles(result.stdout, sha)
}
/**
* Parses git `log` or `diff` output into a list of changed files
* (see `getChangedFiles` for an example of use)
*
* @param stdout raw output from git run with the `-z` and `--name-status` flags
* @param committish the commit-ish the command was run against
*/
export function parseChangedFiles(
stdout: string,
committish: string
): ReadonlyArray<CommittedFileChange> {
const lines = stdout.split('\0')
// Remove the trailing empty line
lines.splice(-1, 1)
const files: CommittedFileChange[] = []
for (let i = 0; i < lines.length; i++) {
const statusText = lines[i]
@ -201,7 +213,7 @@ export async function getChangedFiles(
const path = lines[++i]
files.push(new CommittedFileChange(path, status, sha))
files.push(new CommittedFileChange(path, status, committish))
}
return files

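parseChangedFiles consumes the NUL-delimited --name-status output, where each entry is a status code followed by the path it applies to. A tiny worked example with hand-written input; real callers pass the stdout of the git invocation above:

// Sketch only: the stdout string is hand-constructed to show the shape that
// `git log -z --name-status` produces.
import { parseChangedFiles } from './app/src/lib/git/log'

const stdout = ['M', 'app/src/lib/git/log.ts', 'A', 'app/src/lib/git/format-patch.ts', ''].join('\0')
const files = parseChangedFiles(stdout, 'HEAD')

for (const file of files) {
  // file.status.kind is the parsed AppFileStatusKind, e.g. modified or new
  console.log(file.status.kind, file.path)
}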
View file

@ -6,7 +6,7 @@ import { GitError } from 'dugite'
import { Repository } from '../../models/repository'
import { Branch } from '../../models/branch'
import { MergeResult } from '../../models/merge'
import { ComputedActionKind } from '../../models/action'
import { ComputedAction } from '../../models/computed-action'
import { parseMergeResult } from '../merge-tree-parser'
import { spawnAndComplete } from './spawn'
@ -78,11 +78,11 @@ export async function mergeTree(
const mergeBase = await getMergeBase(repository, ours.tip.sha, theirs.tip.sha)
if (mergeBase === null) {
return { kind: ComputedActionKind.Invalid }
return { kind: ComputedAction.Invalid }
}
if (mergeBase === ours.tip.sha || mergeBase === theirs.tip.sha) {
return { kind: ComputedActionKind.Clean, entries: [] }
return { kind: ComputedAction.Clean, entries: [] }
}
const result = await spawnAndComplete(
@ -95,7 +95,7 @@ export async function mergeTree(
if (output.length === 0) {
// the merge commit will be empty - this is fine!
return { kind: ComputedActionKind.Clean, entries: [] }
return { kind: ComputedAction.Clean, entries: [] }
}
return parseMergeResult(output)

View file

@ -9,10 +9,7 @@ import { IPullProgress } from '../../models/progress'
import { IGitAccount } from '../../models/git-account'
import { PullProgressParser, executionOptionsWithProgress } from '../progress'
import { envForAuthentication, AuthenticationErrors } from './authentication'
import {
enableRecurseSubmodulesFlag,
enablePullWithRebase,
} from '../feature-flag'
import { enableRecurseSubmodulesFlag } from '../feature-flag'
async function getPullArgs(
repository: Repository,
@ -24,10 +21,6 @@ async function getPullArgs(
const args = [...networkArguments, 'pull']
if (!enablePullWithRebase()) {
args.push('--no-rebase')
}
if (enableRecurseSubmodulesFlag()) {
args.push('--recurse-submodules')
}

View file

@ -5,13 +5,18 @@ import { GitError } from 'dugite'
import * as byline from 'byline'
import { Repository } from '../../models/repository'
import { RebaseContext, RebaseProgressOptions } from '../../models/rebase'
import {
RebaseInternalState,
RebaseProgressOptions,
GitRebaseProgress,
} from '../../models/rebase'
import { IRebaseProgress } from '../../models/progress'
import {
WorkingDirectoryFileChange,
AppFileStatusKind,
} from '../../models/status'
import { ManualConflictResolution } from '../../models/manual-conflict-resolution'
import { CommitOneLine } from '../../models/commit'
import { merge } from '../merge'
import { formatRebaseValue } from '../rebase'
@ -20,6 +25,39 @@ import { git, IGitResult, IGitExecutionOptions } from './core'
import { stageManualConflictResolution } from './stage'
import { stageFiles } from './update-index'
import { getStatus } from './status'
import { getCommitsInRange } from './rev-list'
import { Branch } from '../../models/branch'
/** The app-specific results from attempting to rebase a repository */
export enum RebaseResult {
/**
* Git completed the rebase without reporting any errors, and the caller can
* signal success to the user.
*/
CompletedWithoutError = 'CompletedWithoutError',
/**
* The rebase encountered conflicts while attempting to rebase, and these
* need to be resolved by the user before the rebase can continue.
*/
ConflictsEncountered = 'ConflictsEncountered',
/**
* The rebase was not able to continue as tracked files were not staged in
* the index.
*/
OutstandingFilesNotStaged = 'OutstandingFilesNotStaged',
/**
* The rebase was not attempted because it could not check the status of the
* repository. The caller needs to confirm the repository is in a usable
* state.
*/
Aborted = 'Aborted',
/**
* An unexpected error as part of the rebase flow was caught and handled.
*
* Check the logs to find the relevant Git details.
*/
Error = 'Error',
}
/**
* Check the `.git/REBASE_HEAD` file exists in a repository to confirm
@ -31,16 +69,16 @@ function isRebaseHeadSet(repository: Repository) {
}
/**
* Detect and build up the context about the rebase being performed on a
* repository. This information is required to help Desktop display information
* to the user about the current action as well as the options available.
* Get the internal state about the rebase being performed on a repository. This
* information is required to help Desktop display information to the user
* about the current action as well as the options available.
*
* Returns `null` if no rebase is detected, or if the expected information
* cannot be found in the repository.
*/
export async function getRebaseContext(
export async function getRebaseInternalState(
repository: Repository
): Promise<RebaseContext | null> {
): Promise<RebaseInternalState | null> {
const isRebase = await isRebaseHeadSet(repository)
if (!isRebase) {
@ -89,6 +127,128 @@ export async function getRebaseContext(
return null
}
/**
* Inspect the `.git/rebase-apply` folder and convert the current rebase state
* into data that can be provided to the rebase flow to update the application
* state.
*
* This is required when Desktop is not responsible for initiating the rebase:
*
* - when a rebase outside Desktop encounters conflicts
* - when a `git pull --rebase` was run and encounters conflicts
*
*/
export async function getRebaseSnapshot(
repository: Repository
): Promise<{
progress: GitRebaseProgress
commits: ReadonlyArray<CommitOneLine>
} | null> {
const rebaseHead = await isRebaseHeadSet(repository)
if (!rebaseHead) {
return null
}
let next: number = -1
let last: number = -1
let originalBranchTip: string | null = null
let baseBranchTip: string | null = null
// if the repository is in the middle of a rebase `.git/rebase-apply` will
// contain the patches for all the commits being rebased, stored in
// auto-incrementing files, e.g. `0001`, `0002`, `0003`, etc ...
try {
// this contains the patch number that was recently applied to the repository
const nextText = await FSE.readFile(
Path.join(repository.path, '.git', 'rebase-apply', 'next'),
'utf8'
)
next = parseInt(nextText, 10)
if (isNaN(next)) {
log.warn(
`[getCurrentProgress] found '${nextText}' in .git/rebase-apply/next which could not be parsed to a valid number`
)
next = -1
}
// this contains the total number of patches to be applied to the repository
const lastText = await FSE.readFile(
Path.join(repository.path, '.git', 'rebase-apply', 'last'),
'utf8'
)
last = parseInt(lastText, 10)
if (isNaN(last)) {
log.warn(
`[getCurrentProgress] found '${lastText}' in .git/rebase-apply/last which could not be parsed to a valid number`
)
last = -1
}
originalBranchTip = await FSE.readFile(
Path.join(repository.path, '.git', 'rebase-apply', 'orig-head'),
'utf8'
)
originalBranchTip = originalBranchTip.trim()
baseBranchTip = await FSE.readFile(
Path.join(repository.path, '.git', 'rebase-apply', 'onto'),
'utf8'
)
baseBranchTip = baseBranchTip.trim()
} catch {}
if (
next > 0 &&
last > 0 &&
originalBranchTip !== null &&
baseBranchTip !== null
) {
const percentage = next / last
const value = formatRebaseValue(percentage)
const commits = await getCommitsInRange(
repository,
baseBranchTip,
originalBranchTip
)
if (commits === null || commits.length === 0) {
return null
}
// this number starts from 1, but our array of commits starts from 0
const nextCommitIndex = next - 1
const hasValidCommit =
commits.length > 0 &&
nextCommitIndex >= 0 &&
nextCommitIndex <= commits.length
const currentCommitSummary = hasValidCommit
? commits[nextCommitIndex].summary
: null
return {
progress: {
value,
rebasedCommitCount: next,
totalCommitCount: last,
currentCommitSummary,
},
commits,
}
}
return null
}
/**
* Attempt to read the `.git/REBASE_HEAD` file inside a repository to confirm
* the rebase is still active.
@ -127,12 +287,17 @@ class GitRebaseParser {
return null
}
const commitSummary = match[1]
const currentCommitSummary = match[1]
this.rebasedCommitCount++
const progress = this.rebasedCommitCount / this.totalCommitCount
const value = formatRebaseValue(progress)
// TODO: dig into why we sometimes get an extra progress event reported
if (this.rebasedCommitCount > this.totalCommitCount) {
this.rebasedCommitCount = this.totalCommitCount
}
return {
kind: 'rebase',
title: `Rebasing commit ${this.rebasedCommitCount} of ${
@ -141,7 +306,7 @@ class GitRebaseParser {
value,
rebasedCommitCount: this.rebasedCommitCount,
totalCommitCount: this.totalCommitCount,
commitSummary,
currentCommitSummary,
}
}
}
@ -186,19 +351,38 @@ function configureOptionsForRebase(
*/
export async function rebase(
repository: Repository,
baseBranch: string,
targetBranch: string,
progress?: RebaseProgressOptions
baseBranch: Branch,
targetBranch: Branch,
progressCallback?: (progress: IRebaseProgress) => void
): Promise<RebaseResult> {
const options = configureOptionsForRebase(
{
expectedErrors: new Set([GitError.RebaseConflicts]),
},
progress
)
const baseOptions: IGitExecutionOptions = {
expectedErrors: new Set([GitError.RebaseConflicts]),
}
let options = baseOptions
if (progressCallback !== undefined) {
const commits = await getCommitsInRange(
repository,
baseBranch.tip.sha,
targetBranch.tip.sha
)
if (commits === null) {
return RebaseResult.Error
}
const totalCommitCount = commits.length
options = configureOptionsForRebase(baseOptions, {
rebasedCommitCount: 0,
totalCommitCount,
progressCallback,
})
}
const result = await git(
['rebase', baseBranch, targetBranch],
['rebase', baseBranch.name, targetBranch.name],
repository.path,
'rebase',
options
@ -212,37 +396,6 @@ export async function abortRebase(repository: Repository) {
await git(['rebase', '--abort'], repository.path, 'abortRebase')
}
/** The app-specific results from attempting to rebase a repository */
export enum RebaseResult {
/**
* Git completed the rebase without reporting any errors, and the caller can
* signal success to the user.
*/
CompletedWithoutError = 'CompletedWithoutError',
/**
* The rebase encountered conflicts while attempting to rebase, and these
* need to be resolved by the user before the rebase can continue.
*/
ConflictsEncountered = 'ConflictsEncountered',
/**
* The rebase was not able to continue as tracked files were not staged in
* the index.
*/
OutstandingFilesNotStaged = 'OutstandingFilesNotStaged',
/**
* The rebase was not attempted because it could not check the status of the
* repository. The caller needs to confirm the repository is in a usable
* state.
*/
Aborted = 'Aborted',
/**
* An unexpected error as part of the rebase flow was caught and handled.
*
* Check the logs to find the relevant Git details.
*/
Error = 'Error',
}
function parseRebaseResult(result: IGitResult): RebaseResult {
if (result.exitCode === 0) {
return RebaseResult.CompletedWithoutError
@ -271,7 +424,7 @@ export async function continueRebase(
repository: Repository,
files: ReadonlyArray<WorkingDirectoryFileChange>,
manualResolutions: ReadonlyMap<string, ManualConflictResolution> = new Map(),
progress?: RebaseProgressOptions
progressCallback?: (progress: IRebaseProgress) => void
): Promise<RebaseResult> {
const trackedFiles = files.filter(f => {
return f.status.kind !== AppFileStatusKind.Untracked
@ -284,7 +437,7 @@ export async function continueRebase(
await stageManualConflictResolution(repository, file, resolution)
} else {
log.error(
`couldn't find file ${path} even though there's a manual resolution for it`
`[continueRebase] couldn't find file ${path} even though there's a manual resolution for it`
)
}
}
@ -294,10 +447,9 @@ export async function continueRebase(
await stageFiles(repository, otherFiles)
const status = await getStatus(repository)
if (status == null) {
log.warn(
`[rebase] unable to get status after staging changes, skipping any other steps`
`[continueRebase] unable to get status after staging changes, skipping any other steps`
)
return RebaseResult.Aborted
}
@ -311,15 +463,34 @@ export async function continueRebase(
f => f.status.kind !== AppFileStatusKind.Untracked
)
const options = configureOptionsForRebase(
{
expectedErrors: new Set([
GitError.RebaseConflicts,
GitError.UnresolvedConflicts,
]),
},
progress
)
const baseOptions: IGitExecutionOptions = {
expectedErrors: new Set([
GitError.RebaseConflicts,
GitError.UnresolvedConflicts,
]),
}
let options = baseOptions
if (progressCallback !== undefined) {
const snapshot = await getRebaseSnapshot(repository)
if (snapshot === null) {
log.warn(
`[continueRebase] unable to get rebase status, skipping any other steps`
)
return RebaseResult.Aborted
}
const { progress } = snapshot
const { rebasedCommitCount, totalCommitCount } = progress
options = configureOptionsForRebase(baseOptions, {
rebasedCommitCount,
totalCommitCount,
progressCallback,
})
}
if (trackedFilesAfter.length === 0) {
log.warn(

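Progress reporting now works in two layers: getCommitsInRange determines the total number of commits before the rebase starts, and the streaming parser turns each applied commit into a value between 0 and 1. A hedged sketch of wiring a progress callback into rebase(); import paths follow this diff's layout and the branches are assumed to be Branch models with valid tips:

// Sketch only: logs rebase progress as it streams in.
import { Repository } from './app/src/models/repository'
import { Branch } from './app/src/models/branch'
import { rebase, RebaseResult } from './app/src/lib/git/rebase'

async function rebaseWithProgress(
  repository: Repository,
  baseBranch: Branch,
  targetBranch: Branch
): Promise<RebaseResult> {
  return rebase(repository, baseBranch, targetBranch, progress => {
    // `value` is already clamped to the range [0, 1] by formatRebaseValue
    const percent = Math.round(progress.value * 100)
    console.log(
      `${percent}%: rebasing commit ${progress.rebasedCommitCount} of ${progress.totalCommitCount}`
    )
  })
}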
View file

@ -64,15 +64,19 @@ export async function getRecentBranches(
return [...names]
}
const noCommitsOnBranchRe = new RegExp(
"fatal: your current branch '.*' does not have any commits yet"
)
/**
* Gets the distinct list of branches that have been checked out after a specific date
* Returns a map keyed on branch names
*
* @param repository the repository whose reflog you want to check
* @param afterDate the minimum date a checkout has to occur
* @param afterDate filters checkouts so that only those occurring on or after this date are returned
* @returns map of branch name -> checkout date
*/
export async function getCheckoutsAfterDate(
export async function getBranchCheckouts(
repository: Repository,
afterDate: Date
): Promise<Map<string, Date>> {
@ -80,7 +84,7 @@ export async function getCheckoutsAfterDate(
const regex = new RegExp(
/^[a-z0-9]{40}\sHEAD@{(.*)}\scheckout: moving from\s.*\sto\s(.*)$/
)
const gitOutput = await git(
const result = await git(
[
'reflog',
'--date=iso',
@ -90,10 +94,21 @@ export async function getCheckoutsAfterDate(
'--',
],
repository.path,
'getCheckoutsAfterDate'
'getCheckoutsAfterDate',
{ successExitCodes: new Set([0, 128]) }
)
const checkouts = new Map<string, Date>()
const lines = gitOutput.stdout.split('\n')
// edge case where an orphaned branch is created but Git raises an error when
// reading the reflog on this new branch as it has no commits
//
// see https://github.com/desktop/desktop/issues/7983 for more information
if (result.exitCode === 128 && noCommitsOnBranchRe.test(result.stderr)) {
return checkouts
}
const lines = result.stdout.split('\n')
for (const line of lines) {
const parsedLine = regex.exec(line)

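Each reflog line is matched against a single regular expression that captures the checkout timestamp and the destination branch. A small worked example with a hand-written line; the SHA and date are made up:

// Sketch only: shows what the regex above extracts from one reflog line.
const checkoutRe = /^[a-z0-9]{40}\sHEAD@{(.*)}\scheckout: moving from\s.*\sto\s(.*)$/

const line =
  'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3 HEAD@{2019-10-31 09:12:34 +0100} checkout: moving from development to my-feature'

const match = checkoutRe.exec(line)
if (match !== null) {
  const [, timestamp, branchName] = match
  console.log(branchName, new Date(timestamp))
  // -> my-feature, checked out at the captured timestamp
}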
View file

@ -2,6 +2,7 @@ import { GitError } from 'dugite'
import { git } from './core'
import { Repository } from '../../models/repository'
import { Branch, BranchType, IAheadBehind } from '../../models/branch'
import { CommitOneLine } from '../../models/commit'
/**
* Convert two refs into the Git range syntax representing the set of commits
@ -91,3 +92,70 @@ export async function getBranchAheadBehind(
const range = revSymmetricDifference(branch.name, upstream)
return getAheadBehind(repository, range)
}
/**
* Get a list of commits from the target branch that do not exist on the base
* branch, ordered how they will be applied to the base branch.
*
* This emulates how `git rebase` initially determines what will be applied to
* the repository.
*
* Returns `null` when the rebase is not possible to perform, because of a
* missing commit ID
*/
export async function getCommitsInRange(
repository: Repository,
baseBranchSha: string,
targetBranchSha: string
): Promise<ReadonlyArray<CommitOneLine> | null> {
const range = revRange(baseBranchSha, targetBranchSha)
const args = [
'rev-list',
range,
'--reverse',
// the combination of these two arguments means each line of the stdout
// will contain the full commit sha and a commit summary
`--oneline`,
`--no-abbrev-commit`,
'--',
]
const options = {
expectedErrors: new Set<GitError>([GitError.BadRevision]),
}
const result = await git(args, repository.path, 'getCommitsInRange', options)
if (result.gitError === GitError.BadRevision) {
// BadRevision can be raised here if git rev-list is unable to resolve a ref
// to a commit ID, so we need to signal to the caller that this rebase is
// not possible to perform
log.warn(
'Unable to rebase these branches because one or both of the refs do not exist in the repository'
)
return null
}
const lines = result.stdout.split('\n')
const commits = new Array<CommitOneLine>()
const commitSummaryRe = /^([a-z0-9]{40}) (.*)$/
for (const line of lines) {
const match = commitSummaryRe.exec(line)
if (match !== null && match.length === 3) {
const sha = match[1]
const summary = match[2]
commits.push({
sha,
summary,
})
}
}
return commits
}

View file

@ -24,18 +24,18 @@ export async function stageManualConflictResolution(
repository: Repository,
file: WorkingDirectoryFileChange,
manualResolution: ManualConflictResolution
): Promise<boolean> {
): Promise<void> {
const { status } = file
// if somehow the file isn't in a conflicted state
if (!isConflictedFileStatus(status)) {
log.error(`tried to manually resolve unconflicted file (${file.path})`)
return false
return
}
if (!isManualConflict(status)) {
log.error(
`tried to manually resolve conflicted file with markers (${file.path})`
)
return false
return
}
const chosen =
@ -43,28 +43,29 @@ export async function stageManualConflictResolution(
? status.entry.them
: status.entry.us
let exitCode: number = -1
switch (chosen) {
case GitStatusEntry.Deleted: {
exitCode = (await git(
['rm', file.path],
repository.path,
'removeConflictedFile'
)).exitCode
await git(['rm', file.path], repository.path, 'removeConflictedFile')
break
}
case GitStatusEntry.Added: {
await git(['add', file.path], repository.path, 'addConflictedFile')
break
}
case GitStatusEntry.Added:
case GitStatusEntry.UpdatedButUnmerged: {
exitCode = (await git(
['add', file.path],
const choiceFlag =
manualResolution === ManualConflictResolutionKind.theirs
? 'theirs'
: 'ours'
await git(
['checkout', `--${choiceFlag}`, '--', file.path],
repository.path,
'addConflictedFile'
)).exitCode
'checkoutConflictedFile'
)
await git(['add', file.path], repository.path, 'addConflictedFile')
break
}
default:
assertNever(chosen, 'unaccounted for git status entry possibility')
}
return exitCode === 0
}

View file

@ -1,84 +1,287 @@
import { git } from '.'
import { GitError as DugiteError } from 'dugite'
import { git, GitError } from './core'
import { Repository } from '../../models/repository'
import {
IStashEntry,
StashedChangesLoadStates,
StashedFileChanges,
} from '../../models/stash-entry'
import {
WorkingDirectoryFileChange,
CommittedFileChange,
} from '../../models/status'
import { parseChangedFiles } from './log'
import { stageFiles } from './update-index'
export const DesktopStashEntryMarker = '!!GitHub_Desktop'
export interface IStashEntry {
/** The name of the branch at the time the entry was created. */
readonly branchName: string
/** The SHA of the commit object created as a result of stashing. */
readonly stashSha: string
}
/** RegEx for parsing out the stash SHA and message */
const stashEntryRe = /^([0-9a-f]{40})@(.+)$/
/**
* RegEx for determining if a stash entry is created by Desktop
*
* This is done by looking for a magic string with the following
* format: `!!GitHub_Desktop<branch@commit>`
* format: `!!GitHub_Desktop<branch>`
*/
const stashEntryMessageRe = /^!!GitHub_Desktop<(.+)@([0-9|a-z|A-Z]{40})>$/
const desktopStashEntryMessageRe = /!!GitHub_Desktop<(.+)>$/
type StashResult = {
/** The stash entries created by Desktop */
readonly desktopEntries: ReadonlyArray<IStashEntry>
/**
* The total number of stash entries,
* i.e. stash entries created both by Desktop and outside of Desktop
*/
readonly stashEntryCount: number
}
/**
* Get the list of stash entries created by Desktop in the current repository
* using the default ordering of refs (which is LIFO ordering),
* as well as the total number of stash entries.
*/
export async function getDesktopStashEntries(
repository: Repository
): Promise<ReadonlyArray<IStashEntry>> {
const prettyFormat = '%H@%gs'
export async function getStashes(repository: Repository): Promise<StashResult> {
const delimiter = '1F'
const delimiterString = String.fromCharCode(parseInt(delimiter, 16))
const format = ['%gd', '%H', '%gs'].join(`%x${delimiter}`)
const result = await git(
['log', '-g', 'refs/stash', `--pretty=${prettyFormat}`],
['log', '-g', '-z', `--pretty=${format}`, 'refs/stash'],
repository.path,
'getStashEntries'
'getStashEntries',
{
successExitCodes: new Set([0, 128]),
}
)
if (result.stderr !== '') {
//don't really care what the error is right now, but will once dugite is updated
throw new Error(result.stderr)
// There's no refs/stash reflog in the repository or it's not
// even a repository. In either case we don't care
if (result.exitCode === 128) {
return { desktopEntries: [], stashEntryCount: 0 }
}
const out = result.stdout
const lines = out.split('\n')
const desktopStashEntries: Array<IStashEntry> = []
const files: StashedFileChanges = {
kind: StashedChangesLoadStates.NotLoaded,
}
const stashEntries: Array<IStashEntry> = []
for (const line of lines) {
const match = stashEntryRe.exec(line)
const entries = result.stdout.split('\0').filter(s => s !== '')
for (const entry of entries) {
const pieces = entry.split(delimiterString)
if (match == null) {
continue
if (pieces.length === 3) {
const [name, stashSha, message] = pieces
const branchName = extractBranchFromMessage(message)
if (branchName !== null) {
desktopStashEntries.push({
name,
branchName,
stashSha,
files,
})
}
}
}
return {
desktopEntries: desktopStashEntries,
stashEntryCount: entries.length - 1,
}
}
/**
* Returns the last Desktop created stash entry for the given branch
*/
export async function getLastDesktopStashEntryForBranch(
repository: Repository,
branchName: string
) {
const stash = await getStashes(repository)
// Since stash objects are returned in a LIFO manner, the first
// entry found is guaranteed to be the last entry created
return (
stash.desktopEntries.find(stash => stash.branchName === branchName) || null
)
}
/** Creates a stash entry message that indicates the entry was created by Desktop */
export function createDesktopStashMessage(branchName: string) {
return `${DesktopStashEntryMarker}<${branchName}>`
}
/**
* Stash the working directory changes for the current branch
*/
export async function createDesktopStashEntry(
repository: Repository,
branchName: string,
untrackedFilesToStage: ReadonlyArray<WorkingDirectoryFileChange>
): Promise<true> {
// We must ensure that no untracked files are present before stashing
// See https://github.com/desktop/desktop/pull/8085
// First ensure that all changes in file are selected
// (in case the user has not explicitly checked the checkboxes for the untracked files)
const fullySelectedUntrackedFiles = untrackedFilesToStage.map(x =>
x.withIncludeAll(true)
)
await stageFiles(repository, fullySelectedUntrackedFiles)
const message = createDesktopStashMessage(branchName)
const args = ['stash', 'push', '-m', message]
const result = await git(args, repository.path, 'createStashEntry', {
successExitCodes: new Set<number>([0, 1]),
})
if (result.exitCode === 1) {
// search for any line starting with `error:` - /m here to ensure this is
// applied to each line, without needing to split the text
const errorPrefixRe = /^error: /m
const matches = errorPrefixRe.exec(result.stderr)
if (matches !== null && matches.length > 0) {
// rethrow, because these messages should prevent the stash from being created
throw new GitError(result, args)
}
const message = match[2]
const branchName = extractBranchFromMessage(message)
// if no error messages were emitted by Git, we should log but continue because
// a valid stash was created and this should not interfere with the checkout
// if branch name is null, the stash entry isn't using our magic string
if (branchName === null) {
continue
}
log.info(
`[createDesktopStashEntry] a stash was created successfully but exit code ${
result.exitCode
} reported. stderr: ${result.stderr}`
)
}
stashEntries.push({
branchName: branchName,
stashSha: match[1],
return true
}
async function getStashEntryMatchingSha(repository: Repository, sha: string) {
const stash = await getStashes(repository)
return stash.desktopEntries.find(e => e.stashSha === sha) || null
}
/**
* Removes the given stash entry if it exists
*
* @param stashSha the SHA that identifies the stash entry
*/
export async function dropDesktopStashEntry(
repository: Repository,
stashSha: string
) {
const entryToDelete = await getStashEntryMatchingSha(repository, stashSha)
if (entryToDelete !== null) {
const args = ['stash', 'drop', entryToDelete.name]
await git(args, repository.path, 'dropStashEntry')
}
}
/**
* Pops the stash entry identified by matching `stashSha` to its commit hash.
*
* To see the commit hash of stash entry, run
* `git log -g refs/stash --pretty="%nentry: %gd%nsubject: %gs%nhash: %H%n"`
* in a repo with some stash entries.
*/
export async function popStashEntry(
repository: Repository,
stashSha: string
): Promise<void> {
// ignoring these git errors for now, this will change when we start
// implementing the stash conflict flow
const expectedErrors = new Set<DugiteError>([DugiteError.MergeConflicts])
const successExitCodes = new Set<number>([0, 1])
const stashToPop = await getStashEntryMatchingSha(repository, stashSha)
if (stashToPop !== null) {
const args = ['stash', 'pop', '--quiet', `${stashToPop.name}`]
const result = await git(args, repository.path, 'popStashEntry', {
expectedErrors,
successExitCodes,
})
}
return stashEntries
// popping a stash that creates conflicts in the working directory
// reports an exit code of `1` and the entry is not dropped after being applied.
// so, we check for this case and drop them manually
if (result.exitCode === 1) {
if (result.stderr.length > 0) {
// rethrow, because anything in stderr should prevent the stash from being popped
throw new GitError(result, args)
}
log.info(
`[popStashEntry] a stash was popped successfully but exit code ${
result.exitCode
} reported.`
)
// bye bye
await dropDesktopStashEntry(repository, stashSha)
}
}
}
function extractBranchFromMessage(message: string): string | null {
const [, desktopMessage] = message.split(':').map(s => s.trim())
const match = stashEntryMessageRe.exec(desktopMessage)
if (match === null) {
return null
const match = desktopStashEntryMessageRe.exec(message)
return match === null || match[1].length === 0 ? null : match[1]
}
/**
* Get the files that were changed in the given stash commit.
*
* This is different than `getChangedFiles` because stashes
* have _3 parents(!!!)_
*/
export async function getStashedFiles(
repository: Repository,
stashSha: string
): Promise<ReadonlyArray<CommittedFileChange>> {
const [trackedFiles, untrackedFiles] = await Promise.all([
getChangedFilesWithinStash(repository, stashSha),
getChangedFilesWithinStash(repository, `${stashSha}^3`),
])
const files = new Map<string, CommittedFileChange>()
trackedFiles.forEach(x => files.set(x.path, x))
untrackedFiles.forEach(x => files.set(x.path, x))
return [...files.values()].sort((x, y) => x.path.localeCompare(y.path))
}
/**
* Same thing as `getChangedFiles` but with extra handling for 128 exit code
* (which happens if the commit's parent is not valid)
*
* **TODO:** merge this with `getChangedFiles` in `log.ts`
*/
async function getChangedFilesWithinStash(repository: Repository, sha: string) {
// opt-in for rename detection (-M) and copies detection (-C)
// this is equivalent to the user configuring 'diff.renames' to 'copies'
// NOTE: order here matters - doing -M before -C means copies aren't detected
const args = [
'log',
sha,
'-C',
'-M',
'-m',
'-1',
'--no-show-signature',
'--first-parent',
'--name-status',
'--format=format:',
'-z',
'--',
]
const result = await git(args, repository.path, 'getChangedFilesForStash', {
// if this fails, it's most likely
// because there weren't any untracked files,
// and that's okay!
successExitCodes: new Set([0, 128]),
})
if (result.exitCode === 0 && result.stdout.length > 0) {
return parseChangedFiles(result.stdout, sha)
}
const branchName = match[1]
return branchName.length > 0 ? branchName : null
}
export function createStashMessage(branchName: string, tipSha: string) {
return `${DesktopStashEntryMarker}<${branchName}@${tipSha}>`
return []
}
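Desktop marks its own stash entries with a magic message so they can be told apart from stashes created outside the app; the branch name is embedded when the stash is created and recovered later with desktopStashEntryMessageRe. A small round-trip example using the same marker and regex as above:

// Sketch only: mirrors createDesktopStashMessage / extractBranchFromMessage.
const DesktopStashEntryMarker = '!!GitHub_Desktop'
const desktopStashEntryMessageRe = /!!GitHub_Desktop<(.+)>$/

const message = `${DesktopStashEntryMarker}<my-feature>`
// `git stash push -m` stores it as a reflog subject that looks roughly like:
const subject = `On my-feature: ${message}`

const match = desktopStashEntryMessageRe.exec(subject)
console.log(match !== null ? match[1] : null) // -> 'my-feature'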

View file

@ -25,9 +25,8 @@ import { IAheadBehind } from '../../models/branch'
import { fatalError } from '../../lib/fatal-error'
import { isMergeHeadSet } from './merge'
import { getBinaryPaths } from './diff'
import { getRebaseContext } from './rebase'
import { enablePullWithRebase } from '../feature-flag'
import { RebaseContext } from '../../models/rebase'
import { getRebaseInternalState } from './rebase'
import { RebaseInternalState } from '../../models/rebase'
/**
* V8 has a limit on the size of string it can create (~256MB), and unless we want to
@ -60,7 +59,7 @@ export interface IStatusResult {
readonly mergeHeadFound: boolean
/** details about the rebase operation, if found */
readonly rebaseContext: RebaseContext | null
readonly rebaseInternalState: RebaseInternalState | null
/** the absolute path to the repository's working directory */
readonly workingDirectory: WorkingDirectoryStatus
@ -150,6 +149,10 @@ function convertToAppStatus(
return fatalError(`Unknown file status ${status}`)
}
// List of known conflicted index entries for a file, extracted from mapStatus
// inside `app/src/lib/status-parser.ts` for convenience
const conflictStatusCodes = ['DD', 'AU', 'UD', 'UA', 'DU', 'AA', 'UU']
/**
* Retrieve the status for a given repository,
* and fail gracefully if the location is not a Git repository
@ -196,20 +199,18 @@ export async function getStatus(
const headers = parsed.filter(isStatusHeader)
const entries = parsed.filter(isStatusEntry)
let conflictDetails: ConflictFilesDetails
const mergeHeadFound = await isMergeHeadSet(repository)
const rebaseContext = await getRebaseContext(repository)
const conflictedFilesInIndex = entries.some(
e => conflictStatusCodes.indexOf(e.statusCode) > -1
)
const rebaseInternalState = await getRebaseInternalState(repository)
if (enablePullWithRebase()) {
conflictDetails = await getConflictDetails(
repository,
mergeHeadFound,
rebaseContext
)
} else {
conflictDetails = await getConflictDetails(repository, mergeHeadFound, null)
}
const conflictDetails = await getConflictDetails(
repository,
mergeHeadFound,
conflictedFilesInIndex,
rebaseInternalState
)
// Map of files keyed on their paths.
const files = entries.reduce(
@ -239,7 +240,7 @@ export async function getStatus(
branchAheadBehind,
exists: true,
mergeHeadFound,
rebaseContext,
rebaseInternalState,
workingDirectory,
}
}
@ -357,25 +358,53 @@ async function getRebaseConflictDetails(repository: Repository) {
}
}
/**
* We need to do these operations to detect conflicts that were the result
* of popping a stash into the index
*/
async function getWorkingDirectoryConflictDetails(repository: Repository) {
const conflictCountsByPath = await getFilesWithConflictMarkers(
repository.path
)
let binaryFilePaths: ReadonlyArray<string> = []
try {
// its totally fine if HEAD doesn't exist, which throws an error
binaryFilePaths = await getBinaryPaths(repository, 'HEAD')
} catch (error) {}
return {
conflictCountsByPath,
binaryFilePaths,
}
}
/**
* gets the conflicted files count and binary file paths in a given repository.
* for computing an `IStatusResult`.
*
* @param repository to get details from
* @param mergeHeadFound whether a merge conflict has been detected
* @param rebaseContext details about the current rebase operation (if found)
* @param lookForStashConflicts whether it looks like a stash has introduced conflicts
* @param rebaseInternalState details about the current rebase operation (if found)
*/
async function getConflictDetails(
repository: Repository,
mergeHeadFound: boolean,
rebaseContext: RebaseContext | null
lookForStashConflicts: boolean,
rebaseInternalState: RebaseInternalState | null
): Promise<ConflictFilesDetails> {
try {
if (mergeHeadFound) {
return await getMergeConflictDetails(repository)
} else if (rebaseContext !== null) {
}
if (rebaseInternalState !== null) {
return await getRebaseConflictDetails(repository)
}
if (lookForStashConflicts) {
return await getWorkingDirectoryConflictDetails(repository)
}
} catch (error) {
log.error(
'Unexpected error from git operations in getConflictDetails',

127
app/src/lib/git/worktree.ts Normal file
View file

@ -0,0 +1,127 @@
import * as Os from 'os'
import * as Path from 'path'
import * as FSE from 'fs-extra'
import { git } from './core'
import { v4 as uuid } from 'uuid'
import { Repository, LinkedWorkTree } from '../../models/repository'
import { getMatches } from '../helpers/regex'
const DesktopWorkTreePrefix = 'github-desktop-worktree-'
/** Enumerate the list of work trees reported by Git for a repository */
export async function listWorkTrees(
repository: Repository
): Promise<ReadonlyArray<LinkedWorkTree>> {
const result = await git(
['worktree', 'list', '--porcelain'],
repository.path,
'listWorkTrees'
)
const worktrees = new Array<LinkedWorkTree>()
// the porcelain output from git-worktree covers multiple lines
const listWorkTreeRe = /worktree (.*)\nHEAD ([a-f0-9]*)\n(branch .*|detached)\n/gm
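// For illustration, the porcelain output this regex expects looks roughly
// like the following (paths and SHAs are made up):
//
//   worktree /Users/me/src/desktop
//   HEAD 1234567890abcdef1234567890abcdef12345678
//   branch refs/heads/master
//
//   worktree /private/var/folders/ab/github-desktop-worktree-1234
//   HEAD 1234567890abcdef1234567890abcdef12345678
//   detached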
getMatches(result.stdout, listWorkTreeRe).forEach(m => {
if (m.length === 4) {
worktrees.push({
path: m[1],
head: m[2],
})
} else {
log.debug(
`[listWorkTrees] match '${
m[0]
}' does not have the expected data or output. Skipping...`
)
}
})
return worktrees
}
/**
* Creates a temporary work tree for use in Desktop, even if one already exists
* for that repository. Won't modify the repository's working directory.
* _The returned worktree will be checked out to the given commit._
*/
export async function createTemporaryWorkTree(
repository: Repository,
commit: string
): Promise<LinkedWorkTree> {
const workTreePath = await FSE.mkdtemp(getTemporaryDirectoryPrefix())
await git(
['worktree', 'add', '-f', workTreePath, commit],
repository.path,
'addWorkTree'
)
// Because Git doesn't give enough information from stdout for the previous
// Git call, this function enumerates the available worktrees to find the
// expected worktree
const workTrees = await listWorkTrees(repository)
const directoryName = Path.basename(workTreePath)
const workTree = workTrees.find(t => Path.basename(t.path) === directoryName)
// intentionally vague here to cover `undefined` and `null`
if (!workTree) {
throw new Error(
`[addWorkTree] Unable to find created worktree at path ${workTreePath}`
)
}
return workTree
}
/** Cleanup the temporary worktree at a given location */
export async function destroyWorkTree(
repository: Repository,
workTree: LinkedWorkTree
): Promise<true> {
await git(
['worktree', 'remove', '-f', workTree.path],
repository.path,
'removeWorkTree'
)
return true
}
// creates a unique (to desktop) path in the OS's temp dir
function getTemporaryDirectoryPrefix() {
return Path.join(Os.tmpdir(), `${DesktopWorkTreePrefix}${uuid()}`)
}
async function findTemporaryWorkTrees(
repository: Repository
): Promise<ReadonlyArray<LinkedWorkTree>> {
const workTrees = await listWorkTrees(repository)
// always exclude the first entry as that will be the "main" worktree and we
// should not even look at it funny
const candidateWorkTrees = workTrees.slice(1)
return candidateWorkTrees.filter(t => {
// NOTE:
// we can't reliably check the full path here because Git seems to be
// prefixing the temporary paths on macOS with a `/private` prefix, and
// NodeJS doesn't seem to include this when we ask for the temporary
// directory for the OS
const directoryName = Path.basename(t.path)
return directoryName.startsWith(DesktopWorkTreePrefix)
})
}
/** Enumerate and cleanup any worktrees generated by Desktop */
export async function cleanupTemporaryWorkTrees(
repository: Repository
): Promise<void> {
const temporaryWorkTrees = await findTemporaryWorkTrees(repository)
for (const workTree of temporaryWorkTrees) {
await destroyWorkTree(repository, workTree)
}
}
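/**
* A minimal usage sketch for the helpers above (not part of this module):
* `inspectCommitInTemporaryWorkTree` is a hypothetical caller that checks a
* commit out into a throwaway worktree, does read-only work against it and
* always cleans up, leaving the user's working directory untouched.
*/
async function inspectCommitInTemporaryWorkTree(
repository: Repository,
commit: string
): Promise<void> {
const workTree = await createTemporaryWorkTree(repository, commit)
try {
// read files from workTree.path here, e.g. to simulate a merge
} finally {
await destroyWorkTree(repository, workTree)
}
}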

View file

@ -174,6 +174,15 @@ declare namespace NodeJS {
interface Process extends EventEmitter {
once(event: 'uncaughtException', listener: (error: Error) => void): this
on(event: 'uncaughtException', listener: (error: Error) => void): this
on(
event: 'send-non-fatal-exception',
listener: (error: Error, context?: { [key: string]: string }) => void
): this
emit(
event: 'send-non-fatal-exception',
error: Error,
context?: { [key: string]: string }
): this
removeListener(event: 'exit', listener: Function): this
once(event: 'exit', listener: Function): this
}
@ -188,13 +197,6 @@ interface XMLHttpRequest extends XMLHttpRequestEventTarget {
}
declare namespace Electron {
interface MenuItem {
readonly accelerator?: Electron.Accelerator
readonly submenu?: Electron.Menu
readonly role?: string
readonly type: 'normal' | 'separator' | 'submenu' | 'checkbox' | 'radio'
}
interface RequestOptions {
readonly method: string
readonly url: string

View file

@ -23,7 +23,7 @@ export function generateGravatarUrl(email: string, size: number = 60): string {
* endpoint associated with an account.
*
* This is a workaround for a current limitation with
* GitHub Enterprise, where avatar URLs are inaccessible
* GitHub Enterprise Server, where avatar URLs are inaccessible
* in some scenarios.
*
* @param avatar_url The canonical avatar to use

View file

@ -0,0 +1,28 @@
/**
* Send a caught (ie. non-fatal) exception to the
* non-fatal error bucket
*
* The intended use of this message is for getting insight into
* areas of the code where we suspect alternate failure modes
* other than those accounted for.
*
* Example: In the Desktop tutorial creation logic we handle
* all errors and our initial belief was that the only two failure
* modes we would have to account for were either the repo existing
* on disk or on the user's account. We now suspect that there might
* be other reasons why the creation logic is failing and therefore
* want to send all errors encountered during creation to central
* where we can determine if there are additional failure modes
* for us to consider.
*
* @param kind - a grouping key that allows us to group all errors
* originating in the same area of the code base or relating to the
* same kind of failure (recommend a single non-hyphenated word)
* Example: tutorialRepoCreation
*
* @param error - the caught error
*/
export function sendNonFatalException(kind: string, error: Error) {
process.emit('send-non-fatal-exception', error, { kind })
}
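// A sketch of the intended call pattern (`createTutorialRepository` is a
// hypothetical stand-in for the tutorial creation logic mentioned above):
//
//   try {
//     await createTutorialRepository()
//   } catch (err) {
//     sendNonFatalException('tutorialRepoCreation', err)
//     // rethrow or surface the error to the user as appropriate
//   }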

View file

@ -1,4 +1,5 @@
import * as appProxy from '../ui/lib/app-proxy'
import { URL } from 'url'
/** The HTTP methods available. */
export type HTTPMethod = 'GET' | 'POST' | 'PUT' | 'HEAD'
@ -29,6 +30,9 @@ export class APIError extends Error {
/** The error as sent from the API, if one could be parsed. */
public readonly apiError: IAPIError | null
/** The HTTP response code that the error was delivered with */
public readonly responseStatus: number
public constructor(response: Response, apiError: IAPIError | null) {
let message
if (apiError && apiError.message) {
@ -47,6 +51,7 @@ export class APIError extends Error {
super(message)
this.responseStatus = response.status
this.apiError = apiError
}
}
@ -87,7 +92,16 @@ export function getAbsoluteUrl(endpoint: string, path: string): string {
if (relativePath.startsWith('api/v3/')) {
relativePath = relativePath.substr(7)
}
return encodeURI(`${endpoint}/${relativePath}`)
// Our API endpoints are a bit sloppy in that they don't typically
// include the trailing slash (i.e. we use https://api.github.com for
// dotcom and https://ghe.enterprise.local/api/v3 for Enterprise Server when
// both of those should really include the trailing slash since that's
// the qualified base). We'll work around that here by ensuring
// that the endpoint ends with a trailing slash.
const base = endpoint.endsWith('/') ? endpoint : `${endpoint}/`
return new URL(relativePath, base).toString()
}
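// To illustrate why the trailing slash matters to the WHATWG URL constructor
// (endpoint and path are made up):
//
//   new URL('user/repos', 'https://ghe.example.local/api/v3').toString()
//     -> 'https://ghe.example.local/api/user/repos'
//   new URL('user/repos', 'https://ghe.example.local/api/v3/').toString()
//     -> 'https://ghe.example.local/api/v3/user/repos'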
/**

View file

@ -17,7 +17,7 @@ export interface IMenuItem {
* When specified the click property will be ignored.
* See https://electronjs.org/docs/api/menu-item#roles
*/
readonly role?: string
readonly role?: Electron.MenuItemConstructorOptions['role']
}
/**

View file

@ -1,4 +1,4 @@
import { MenuIDs } from '../main-process/menu'
import { MenuIDs } from '../models/menu-ids'
import { merge } from './merge'
import { IAppState, SelectionType } from '../lib/app-state'
import { Repository } from '../models/repository'
@ -100,6 +100,7 @@ function menuItemStateEqual(state: IMenuItemState, menuItem: MenuItem) {
const allMenuIds: ReadonlyArray<MenuIDs> = [
'rename-branch',
'delete-branch',
'discard-all-changes',
'preferences',
'update-branch',
'compare-to-branch',
@ -149,17 +150,19 @@ function getRepositoryMenuBuilder(state: IAppState): MenuStateBuilder {
let onNonDefaultBranch = false
let onBranch = false
let onDetachedHead = false
let hasChangedFiles = false
let hasDefaultBranch = false
let hasPublishedBranch = false
let networkActionInProgress = false
let tipStateIsUnknown = false
let branchIsUnborn = false
let rebaseInProgress = false
let branchHasStashEntry = false
if (selectedState && selectedState.type === SelectionType.Repository) {
repositorySelected = true
const branchesState = selectedState.state.branchesState
const { branchesState, changesState } = selectedState.state
const tip = branchesState.tip
const defaultBranch = branchesState.defaultBranch
@ -181,15 +184,17 @@ function getRepositoryMenuBuilder(state: IAppState): MenuStateBuilder {
}
hasPublishedBranch = !!tip.branch.upstream
branchHasStashEntry = changesState.stashEntry !== null
} else {
onNonDefaultBranch = true
}
networkActionInProgress = selectedState.state.isPushPullFetchInProgress
const { conflictState } = selectedState.state.changesState
const { conflictState, workingDirectory } = selectedState.state.changesState
rebaseInProgress = conflictState !== null && conflictState.kind === 'rebase'
hasChangedFiles = workingDirectory.files.length > 0
}
// These are IDs for menu items that are entirely _and only_
@ -258,7 +263,13 @@ function getRepositoryMenuBuilder(state: IAppState): MenuStateBuilder {
!tipStateIsUnknown && !branchIsUnborn && !rebaseInProgress
)
menuStateBuilder.setEnabled(
'discard-all-changes',
repositoryActive && hasChangedFiles && !rebaseInProgress
)
menuStateBuilder.setEnabled('compare-to-branch', !onDetachedHead)
menuStateBuilder.setEnabled('toggle-stashed-changes', branchHasStashEntry)
if (
selectedState &&
@ -287,6 +298,7 @@ function getRepositoryMenuBuilder(state: IAppState): MenuStateBuilder {
menuStateBuilder.disable('create-branch')
menuStateBuilder.disable('rename-branch')
menuStateBuilder.disable('delete-branch')
menuStateBuilder.disable('discard-all-changes')
menuStateBuilder.disable('update-branch')
menuStateBuilder.disable('merge-branch')
menuStateBuilder.disable('rebase-branch')
@ -295,7 +307,9 @@ function getRepositoryMenuBuilder(state: IAppState): MenuStateBuilder {
menuStateBuilder.disable('pull')
menuStateBuilder.disable('compare-to-branch')
menuStateBuilder.disable('compare-on-github')
menuStateBuilder.disable('toggle-stashed-changes')
}
return menuStateBuilder
}

View file

@ -1,5 +1,5 @@
import { IMergeEntry, MergeResult } from '../models/merge'
import { ComputedActionKind } from '../models/action'
import { ComputedAction } from '../models/computed-action'
interface IBlobSource {
readonly type: string
@ -194,10 +194,10 @@ export function parseMergeResult(text: string): MergeResult {
if (entriesWithConflicts.length > 0) {
return {
kind: ComputedActionKind.Conflicts,
kind: ComputedAction.Conflicts,
conflictedFiles: entriesWithConflicts.length,
}
} else {
return { kind: ComputedActionKind.Clean, entries }
return { kind: ComputedAction.Clean, entries }
}
}

View file

@ -1,14 +1,22 @@
import { IRepositoryState, RebaseConflictState } from '../lib/app-state'
import {
IRepositoryState,
RebaseConflictState,
IBranchesState,
} from '../lib/app-state'
import {
ChooseBranchesStep,
RebaseStep,
ShowConflictsStep,
} from '../models/rebase-flow-state'
import { Branch } from '../models/branch'
} from '../models/rebase-flow-step'
import { Branch, IAheadBehind } from '../models/branch'
import { TipState } from '../models/tip'
import { clamp } from './clamp'
export const initializeNewRebaseFlow = (state: IRepositoryState) => {
/**
* Set up the rebase flow state when the user needs to select a branch as the
* base for the operation.
*/
export function initializeNewRebaseFlow(state: IRepositoryState) {
const {
defaultBranch,
allBranches,
@ -36,17 +44,21 @@ export const initializeNewRebaseFlow = (state: IRepositoryState) => {
return initialState
}
export const initializeRebaseFlowForConflictedRepository = (
/**
* Set up the rebase flow when rebase conflicts are detected in the repository.
*
* This indicates a rebase is in progress, and the application needs to guide
* the user to resolve conflicts and complete the rebase.
*
* @param conflictState current set of conflicts
*/
export function initializeRebaseFlowForConflictedRepository(
conflictState: RebaseConflictState
) => {
const { targetBranch, baseBranch } = conflictState
): ShowConflictsStep {
const initialState: ShowConflictsStep = {
kind: RebaseStep.ShowConflicts,
targetBranch,
baseBranch,
conflictState,
}
return initialState
}
@ -58,3 +70,30 @@ export const initializeRebaseFlowForConflictedRepository = (
export function formatRebaseValue(value: number) {
return Math.round(clamp(value, 0, 1) * 100) / 100
}
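// Worked examples (input values chosen purely for illustration):
//   formatRebaseValue(0.456) -> 0.46
//   formatRebaseValue(1.2)   -> 1   (clamped into the 0..1 range first)
//   formatRebaseValue(-0.5)  -> 0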
/**
* Check application state to see whether the action applied to the current
* branch should be a force push
*/
export function isCurrentBranchForcePush(
branchesState: IBranchesState,
aheadBehind: IAheadBehind | null
) {
if (aheadBehind === null) {
// no tracking branch found
return false
}
const { tip, rebasedBranches } = branchesState
const { ahead, behind } = aheadBehind
let branchWasRebased = false
if (tip.kind === TipState.Valid) {
const localBranchName = tip.branch.nameWithoutRemote
const { sha } = tip.branch.tip
const foundEntry = rebasedBranches.get(localBranchName)
branchWasRebased = foundEntry === sha
}
return branchWasRebased && behind > 0 && ahead > 0
}
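// A sketch of how a caller might use this check before pushing (the state
// values come from the repository state, as in the signature above):
//
//   if (isCurrentBranchForcePush(branchesState, aheadBehind)) {
//     // warn the user and push with --force-with-lease rather than plain push
//   }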

View file

@ -57,12 +57,10 @@ async function getRawShellEnv(): Promise<string | null> {
cleanup()
}, 5000)
const options = {
child = ChildProcess.spawn(shell, ['-ilc', 'command env'], {
detached: true,
stdio: ['ignore', 'pipe', process.stderr],
}
child = ChildProcess.spawn(shell, ['-ilc', 'command env'], options)
})
const buffers: Array<Buffer> = []

View file

@ -9,6 +9,7 @@ export enum Shell {
Hyper = 'Hyper',
iTerm2 = 'iTerm2',
PowerShellCore = 'PowerShell Core',
Kitty = 'Kitty',
}
export const Default = Shell.Terminal
@ -30,6 +31,10 @@ export function parse(label: string): Shell {
return Shell.PowerShellCore
}
if (label === Shell.Kitty) {
return Shell.Kitty
}
return Default
}
@ -43,6 +48,8 @@ function getBundleID(shell: Shell): string {
return 'co.zeit.hyper'
case Shell.PowerShellCore:
return 'com.microsoft.powershell'
case Shell.Kitty:
return 'net.kovidgoyal.kitty'
default:
return assertNever(shell, `Unknown shell: ${shell}`)
}
@ -66,11 +73,13 @@ export async function getAvailableShells(): Promise<
hyperPath,
iTermPath,
powerShellCorePath,
kittyPath,
] = await Promise.all([
getShellPath(Shell.Terminal),
getShellPath(Shell.Hyper),
getShellPath(Shell.iTerm2),
getShellPath(Shell.PowerShellCore),
getShellPath(Shell.Kitty),
])
const shells: Array<IFoundShell<Shell>> = []
@ -90,6 +99,11 @@ export async function getAvailableShells(): Promise<
shells.push({ shell: Shell.PowerShellCore, path: powerShellCorePath })
}
if (kittyPath) {
const kittyExecutable = `${kittyPath}/Contents/MacOS/kitty`
shells.push({ shell: Shell.Kitty, path: kittyExecutable })
}
return shells
}
@ -97,7 +111,16 @@ export function launch(
foundShell: IFoundShell<Shell>,
path: string
): ChildProcess {
const bundleID = getBundleID(foundShell.shell)
const commandArgs = ['-b', bundleID, path]
return spawn('open', commandArgs)
if (foundShell.shell === Shell.Kitty) {
// kitty does not handle arguments as expected when using `open` with
// an existing session but closed window (it reverts to the previous
// directory rather than using the new directory).
//
// This workaround launches the internal `kitty` executable which
// will open a new window to the desired path.
return spawn(foundShell.path, ['--single-instance', '--directory', path])
} else {
const bundleID = getBundleID(foundShell.shell)
return spawn('open', ['-b', bundleID, path])
}
}

View file

@ -5,6 +5,7 @@ import { pathExists } from 'fs-extra'
import { assertNever } from '../fatal-error'
import { IFoundShell } from './found-shell'
import { enableWSLDetection } from '../feature-flag'
export enum Shell {
Cmd = 'Command Prompt',
@ -13,6 +14,7 @@ export enum Shell {
Hyper = 'Hyper',
GitBash = 'Git Bash',
Cygwin = 'Cygwin',
WSL = 'WSL',
}
export const Default = Shell.Cmd
@ -42,6 +44,10 @@ export function parse(label: string): Shell {
return Shell.Cygwin
}
if (label === Shell.WSL) {
return Shell.WSL
}
return Default
}
@ -95,6 +101,16 @@ export async function getAvailableShells(): Promise<
})
}
if (enableWSLDetection()) {
const wslPath = await findWSL()
if (wslPath != null) {
shells.push({
shell: Shell.WSL,
path: wslPath,
})
}
}
return shells
}
@ -268,6 +284,45 @@ async function findCygwin(): Promise<string | null> {
return null
}
async function findWSL(): Promise<string | null> {
const system32 = Path.join(
process.env.SystemRoot || 'C:\\Windows',
'System32'
)
const wslPath = Path.join(system32, 'wsl.exe')
const wslConfigPath = Path.join(system32, 'wslconfig.exe')
if (!(await pathExists(wslPath))) {
log.debug(`[WSL] wsl.exe does not exist at '${wslPath}'`)
return null
}
if (!(await pathExists(wslConfigPath))) {
log.debug(
`[WSL] found wsl.exe, but wslconfig.exe does not exist at '${wslConfigPath}'`
)
return null
}
const exitCode = new Promise<number>((resolve, reject) => {
const wslDistros = spawn(wslConfigPath, ['/list'])
wslDistros.on('error', reject)
wslDistros.on('exit', resolve)
})
try {
const result = await exitCode
if (result !== 0) {
log.debug(
`[WSL] found wsl.exe and wslconfig.exe, but no distros are installed. Error Code: ${result}`
)
return null
}
return wslPath
} catch (err) {
log.error(`[WSL] unhandled error when invoking 'wsl /list'`, err)
}
return null
}
export function launch(
foundShell: IFoundShell<Shell>,
path: string
@ -312,6 +367,8 @@ export function launch(
cwd: path,
}
)
case Shell.WSL:
return spawn('START', ['wsl'], { shell: true, cwd: path })
case Shell.Cmd:
return spawn('START', ['cmd'], { shell: true, cwd: path })
default:

View file

@ -89,10 +89,10 @@ export interface IDailyMeasures {
/** The number of times the user pushes with `--force-with-lease` to GitHub.com */
readonly dotcomForcePushCount: number
/** The number of times the user pushed to a GitHub enterprise instance */
/** The number of times the user pushed to a GitHub Enterprise Server instance */
readonly enterprisePushCount: number
/** The number of times the user pushes with `--force-with-lease` to a GitHub Enterprise instance */
/** The number of times the user pushes with `--force-with-lease` to a GitHub Enterprise Server instance */
readonly enterpriseForcePushCount: number
/** The number of times the users pushes to a generic remote */
@ -130,13 +130,19 @@ export interface IDailyMeasures {
/**
* The number of times the user made a commit to a repo hosted on
* a GitHub Enterprise instance
* a GitHub Enterprise Server instance
*/
readonly enterpriseCommits: number
/** The number of time the user made a commit to a repo hosted on Github.com */
/** The number of times the user made a commit to a repo hosted on Github.com */
readonly dotcomCommits: number
/** The number of times the user made a commit to a protected GitHub or GitHub Enterprise Server repository */
readonly commitsToProtectedBranch: number
/** The number of times the user made a commit to a repository with branch protections enabled */
readonly commitsToRepositoryWithBranchProtections: number
/** The number of times the user dismissed the merge conflicts dialog */
readonly mergeConflictsDialogDismissalCount: number
@ -164,17 +170,165 @@ export interface IDailyMeasures {
/** The number of times an aborted rebase is detected */
readonly rebaseAbortedAfterConflictsCount: number
/** The number of times a successful rebase is detected */
/** The number of times a successful rebase after handling conflicts is detected */
readonly rebaseSuccessAfterConflictsCount: number
/** The number of times a successful rebase without conflicts is detected */
readonly rebaseSuccessWithoutConflictsCount: number
/** The number of times a user performed a pull with `pull.rebase` in config set to `true` */
readonly pullWithRebaseCount: number
/** The number of times a user has pulled with `pull.rebase` unset or set to `false` */
readonly pullWithDefaultSettingCount: number
/**
* The number of stash entries created outside of Desktop
* in a given 24 hour day
*/
readonly stashEntriesCreatedOutsideDesktop: number
/**
* The number of times the user is presented with the error
* message "Some of your changes would be overwritten"
*/
readonly errorWhenSwitchingBranchesWithUncommmittedChanges: number
/** The number of times the user opens the "Rebase current branch" menu item */
readonly rebaseCurrentBranchMenuCount: number
/** The number of times the user views a stash entry after checking out a branch */
readonly stashViewedAfterCheckoutCount: number
/** The number of times the user **doesn't** view a stash entry after checking out a branch */
readonly stashNotViewedAfterCheckoutCount: number
/** The number of times the user elects to stash changes on the current branch */
readonly stashCreatedOnCurrentBranchCount: number
/** The number of times the user elects to take changes to new branch instead of stashing them */
readonly changesTakenToNewBranchCount: number
/** The number of times the user elects to restore an entry from their stash */
readonly stashRestoreCount: number
/** The number of times the user elects to discard a stash entry */
readonly stashDiscardCount: number
/**
* The number of times the user views the stash entry as a result
* of clicking the "Stashed changes" row directly
*/
readonly stashViewCount: number
/** The number of times the user takes no action on a stash entry once viewed */
readonly noActionTakenOnStashCount: number
/**
* The number of times the user has opened their external editor from the
* suggested next steps view
*/
readonly suggestedStepOpenInExternalEditor: number
/**
* The number of times the user has opened their repository in Finder/Explorer
* from the suggested next steps view
*/
readonly suggestedStepOpenWorkingDirectory: number
/**
* The number of times the user has opened their repository on GitHub from the
* suggested next steps view
*/
readonly suggestedStepViewOnGitHub: number
/**
* The number of times the user has used the publish repository action from the
* suggested next steps view
*/
readonly suggestedStepPublishRepository: number
/**
* The number of times the user has used the publish branch action from
* the suggested next steps view
*/
readonly suggestedStepPublishBranch: number
/**
* The number of times the user has used the Create PR suggestion
* in the suggested next steps view. Note that this number is a
* subset of `createPullRequestCount`. I.e. if the Create PR suggestion
* is invoked both `suggestedStepCreatePR` and `createPullRequestCount`
* will increment whereas if a PR is created from the menu or from
* a keyboard shortcut only `createPullRequestCount` will increment.
*/
readonly suggestedStepCreatePullRequest: number
/**
* The number of times the user has used the view stash action from
* the suggested next steps view
*/
readonly suggestedStepViewStash: number
/**
* _[Onboarding tutorial]_
* Has the user clicked the button to start the onboarding tutorial?
*/
readonly tutorialStarted: boolean
/**
* _[Onboarding tutorial]_
* Has the user successfully created a tutorial repo?
*/
readonly tutorialRepoCreated: boolean
/**
* _[Onboarding tutorial]_
* Has the user installed an editor, skipped this step, or have an editor already installed?
*/
readonly tutorialEditorInstalled: boolean
/**
* _[Onboarding tutorial]_
* Has the user successfully completed the create a branch step?
*/
readonly tutorialBranchCreated: boolean
/**
* _[Onboarding tutorial]_
* Has the user completed the edit a file step?
*/
readonly tutorialFileEdited: boolean
/**
* _[Onboarding tutorial]_
* Has the user completed the commit a file change step?
*/
readonly tutorialCommitCreated: boolean
/**
* _[Onboarding tutorial]_
* Has the user completed the push a branch step?
*/
readonly tutorialBranchPushed: boolean
/**
* _[Onboarding tutorial]_
* Has the user completed the create a PR step?
*/
readonly tutorialPrCreated: boolean
/**
* _[Onboarding tutorial]_
* Has the user completed all tutorial steps?
*/
readonly tutorialCompleted: boolean
/**
* _[Onboarding tutorial]_
* What's the highest tutorial step completed by user?
* (`0` is tutorial created, first step is `1`)
*/
readonly highestTutorialStepCompleted: number
}
export class StatsDatabase extends Dexie {

View file

@ -89,9 +89,40 @@ const DefaultDailyMeasures: IDailyMeasures = {
rebaseConflictsDialogReopenedCount: 0,
rebaseAbortedAfterConflictsCount: 0,
rebaseSuccessAfterConflictsCount: 0,
rebaseSuccessWithoutConflictsCount: 0,
pullWithRebaseCount: 0,
pullWithDefaultSettingCount: 0,
stashEntriesCreatedOutsideDesktop: 0,
errorWhenSwitchingBranchesWithUncommmittedChanges: 0,
rebaseCurrentBranchMenuCount: 0,
stashViewedAfterCheckoutCount: 0,
stashCreatedOnCurrentBranchCount: 0,
stashNotViewedAfterCheckoutCount: 0,
changesTakenToNewBranchCount: 0,
stashRestoreCount: 0,
stashDiscardCount: 0,
stashViewCount: 0,
noActionTakenOnStashCount: 0,
suggestedStepOpenInExternalEditor: 0,
suggestedStepOpenWorkingDirectory: 0,
suggestedStepViewOnGitHub: 0,
suggestedStepPublishRepository: 0,
suggestedStepPublishBranch: 0,
suggestedStepCreatePullRequest: 0,
suggestedStepViewStash: 0,
commitsToProtectedBranch: 0,
commitsToRepositoryWithBranchProtections: 0,
tutorialStarted: false,
tutorialRepoCreated: false,
tutorialEditorInstalled: false,
tutorialBranchCreated: false,
tutorialFileEdited: false,
tutorialCommitCreated: false,
tutorialBranchPushed: false,
tutorialPrCreated: false,
tutorialCompleted: false,
// this is `-1` because `0` signifies "tutorial created"
highestTutorialStepCompleted: -1,
}
interface IOnboardingStats {
@ -151,7 +182,7 @@ interface IOnboardingStats {
* Time (in seconds) from when the user first launched
* the application and entered the welcome wizard until
* the user performed their first push of a repository
* to GitHub.com or GitHub Enterprise. This metric
* to GitHub.com or GitHub Enterprise Server. This metric
* does not track pushes to non-GitHub remotes.
*/
readonly timeToFirstGitHubPush?: number
@ -233,7 +264,7 @@ interface ICalculatedStats {
/** Is the user logged in with a GitHub.com account? */
readonly dotComAccount: boolean
/** Is the user logged in with an Enterprise account? */
/** Is the user logged in with an Enterprise Server account? */
readonly enterpriseAccount: boolean
/**
@ -648,8 +679,8 @@ export class StatsStore implements IStatsStore {
/**
* Records that the user made a commit using an email address that
* was not associated with the user's account on GitHub.com or GitHub
* Enterprise, meaning that the commit will not be attributed to the user's
* account.
* Enterprise Server, meaning that the commit will not be attributed to the
* user's account.
*/
public recordUnattributedCommit(): Promise<void> {
return this.updateDailyMeasures(m => ({
@ -659,7 +690,7 @@ export class StatsStore implements IStatsStore {
/**
* Records that the user made a commit to a repository hosted on
* a GitHub Enterprise instance
* a GitHub Enterprise Server instance
*/
public recordCommitToEnterprise(): Promise<void> {
return this.updateDailyMeasures(m => ({
@ -674,6 +705,21 @@ export class StatsStore implements IStatsStore {
}))
}
/** Record that the user made a commit to a protected GitHub or GitHub Enterprise Server repository */
public recordCommitToProtectedBranch(): Promise<void> {
return this.updateDailyMeasures(m => ({
commitsToProtectedBranch: m.commitsToProtectedBranch + 1,
}))
}
/** Record that the user made a commit to a repository which has branch protections enabled */
public recordCommitToRepositoryWithBranchProtections(): Promise<void> {
return this.updateDailyMeasures(m => ({
commitsToRepositoryWithBranchProtections:
m.commitsToRepositoryWithBranchProtections + 1,
}))
}
/** Set whether the user has opted out of stats reporting. */
public async setOptOut(
optOut: boolean,
@ -698,14 +744,14 @@ export class StatsStore implements IStatsStore {
}
/** Record that user dismissed diverging branch notification */
public async recordDivergingBranchBannerDismissal(): Promise<void> {
public recordDivergingBranchBannerDismissal(): Promise<void> {
return this.updateDailyMeasures(m => ({
divergingBranchBannerDismissal: m.divergingBranchBannerDismissal + 1,
}))
}
/** Record that user initiated a merge from within the notification banner */
public async recordDivergingBranchBannerInitatedMerge(): Promise<void> {
public recordDivergingBranchBannerInitatedMerge(): Promise<void> {
return this.updateDailyMeasures(m => ({
divergingBranchBannerInitatedMerge:
m.divergingBranchBannerInitatedMerge + 1,
@ -713,7 +759,7 @@ export class StatsStore implements IStatsStore {
}
/** Record that user initiated a compare from within the notification banner */
public async recordDivergingBranchBannerInitiatedCompare(): Promise<void> {
public recordDivergingBranchBannerInitiatedCompare(): Promise<void> {
return this.updateDailyMeasures(m => ({
divergingBranchBannerInitiatedCompare:
m.divergingBranchBannerInitiatedCompare + 1,
@ -724,7 +770,7 @@ export class StatsStore implements IStatsStore {
* Record that user initiated a merge after getting to compare view
* from within notification banner
*/
public async recordDivergingBranchBannerInfluencedMerge(): Promise<void> {
public recordDivergingBranchBannerInfluencedMerge(): Promise<void> {
return this.updateDailyMeasures(m => ({
divergingBranchBannerInfluencedMerge:
m.divergingBranchBannerInfluencedMerge + 1,
@ -732,7 +778,7 @@ export class StatsStore implements IStatsStore {
}
/** Record that the user was shown the notification banner */
public async recordDivergingBranchBannerDisplayed(): Promise<void> {
public recordDivergingBranchBannerDisplayed(): Promise<void> {
return this.updateDailyMeasures(m => ({
divergingBranchBannerDisplayed: m.divergingBranchBannerDisplayed + 1,
}))
@ -766,7 +812,7 @@ export class StatsStore implements IStatsStore {
createLocalStorageTimestamp(FirstPushToGitHubAtKey)
}
/** Record that the user pushed to a GitHub Enterprise instance */
/** Record that the user pushed to a GitHub Enterprise Server instance */
private async recordPushToGitHubEnterprise(
options?: PushOptions
): Promise<void> {
@ -801,21 +847,21 @@ export class StatsStore implements IStatsStore {
}
/** Record that the user saw a 'merge conflicts' warning but continued with the merge */
public async recordUserProceededWhileLoading(): Promise<void> {
public recordUserProceededWhileLoading(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergedWithLoadingHintCount: m.mergedWithLoadingHintCount + 1,
}))
}
/** Record that the user saw a 'merge conflicts' warning but continued with the merge */
public async recordMergeHintSuccessAndUserProceeded(): Promise<void> {
public recordMergeHintSuccessAndUserProceeded(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergedWithCleanMergeHintCount: m.mergedWithCleanMergeHintCount + 1,
}))
}
/** Record that the user saw a 'merge conflicts' warning but continued with the merge */
public async recordUserProceededAfterConflictWarning(): Promise<void> {
public recordUserProceededAfterConflictWarning(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergedWithConflictWarningHintCount:
m.mergedWithConflictWarningHintCount + 1,
@ -825,7 +871,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `mergeConflictsDialogDismissalCount` metric
*/
public async recordMergeConflictsDialogDismissal(): Promise<void> {
public recordMergeConflictsDialogDismissal(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergeConflictsDialogDismissalCount:
m.mergeConflictsDialogDismissalCount + 1,
@ -835,7 +881,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `anyConflictsLeftOnMergeConflictsDialogDismissalCount` metric
*/
public async recordAnyConflictsLeftOnMergeConflictsDialogDismissal(): Promise<
public recordAnyConflictsLeftOnMergeConflictsDialogDismissal(): Promise<
void
> {
return this.updateDailyMeasures(m => ({
@ -847,7 +893,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `mergeConflictsDialogReopenedCount` metric
*/
public async recordMergeConflictsDialogReopened(): Promise<void> {
public recordMergeConflictsDialogReopened(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergeConflictsDialogReopenedCount:
m.mergeConflictsDialogReopenedCount + 1,
@ -857,7 +903,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `guidedConflictedMergeCompletionCount` metric
*/
public async recordGuidedConflictedMergeCompletion(): Promise<void> {
public recordGuidedConflictedMergeCompletion(): Promise<void> {
return this.updateDailyMeasures(m => ({
guidedConflictedMergeCompletionCount:
m.guidedConflictedMergeCompletionCount + 1,
@ -867,7 +913,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `unguidedConflictedMergeCompletionCount` metric
*/
public async recordUnguidedConflictedMergeCompletion(): Promise<void> {
public recordUnguidedConflictedMergeCompletion(): Promise<void> {
return this.updateDailyMeasures(m => ({
unguidedConflictedMergeCompletionCount:
m.unguidedConflictedMergeCompletionCount + 1,
@ -877,7 +923,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `createPullRequestCount` metric
*/
public async recordCreatePullRequest(): Promise<void> {
public recordCreatePullRequest(): Promise<void> {
return this.updateDailyMeasures(m => ({
createPullRequestCount: m.createPullRequestCount + 1,
}))
@ -886,7 +932,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `rebaseConflictsDialogDismissalCount` metric
*/
public async recordRebaseConflictsDialogDismissal(): Promise<void> {
public recordRebaseConflictsDialogDismissal(): Promise<void> {
return this.updateDailyMeasures(m => ({
rebaseConflictsDialogDismissalCount:
m.rebaseConflictsDialogDismissalCount + 1,
@ -894,9 +940,9 @@ export class StatsStore implements IStatsStore {
}
/**
* Increments the `rebaseConflictsDialogDismissalCount` metric
* Increments the `rebaseConflictsDialogReopenedCount` metric
*/
public async recordRebaseConflictsDialogReopened(): Promise<void> {
public recordRebaseConflictsDialogReopened(): Promise<void> {
return this.updateDailyMeasures(m => ({
rebaseConflictsDialogReopenedCount:
m.rebaseConflictsDialogReopenedCount + 1,
@ -906,7 +952,7 @@ export class StatsStore implements IStatsStore {
/**
* Increments the `rebaseAbortedAfterConflictsCount` metric
*/
public async recordRebaseAbortedAfterConflicts(): Promise<void> {
public recordRebaseAbortedAfterConflicts(): Promise<void> {
return this.updateDailyMeasures(m => ({
rebaseAbortedAfterConflictsCount: m.rebaseAbortedAfterConflictsCount + 1,
}))
@ -920,10 +966,20 @@ export class StatsStore implements IStatsStore {
}))
}
/**
* Increments the `rebaseSuccessWithoutConflictsCount` metric
*/
public recordRebaseSuccessWithoutConflicts(): Promise<void> {
return this.updateDailyMeasures(m => ({
rebaseSuccessWithoutConflictsCount:
m.rebaseSuccessWithoutConflictsCount + 1,
}))
}
/**
* Increments the `rebaseSuccessAfterConflictsCount` metric
*/
public async recordRebaseSuccessAfterConflicts(): Promise<void> {
public recordRebaseSuccessAfterConflicts(): Promise<void> {
return this.updateDailyMeasures(m => ({
rebaseSuccessAfterConflictsCount: m.rebaseSuccessAfterConflictsCount + 1,
}))
@ -968,19 +1024,170 @@ export class StatsStore implements IStatsStore {
}
/** Record when a conflicted merge was successfully completed by the user */
public async recordMergeSuccessAfterConflicts(): Promise<void> {
public recordMergeSuccessAfterConflicts(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergeSuccessAfterConflictsCount: m.mergeSuccessAfterConflictsCount + 1,
}))
}
/** Record when a conflicted merge was aborted by the user */
public async recordMergeAbortedAfterConflicts(): Promise<void> {
public recordMergeAbortedAfterConflicts(): Promise<void> {
return this.updateDailyMeasures(m => ({
mergeAbortedAfterConflictsCount: m.mergeAbortedAfterConflictsCount + 1,
}))
}
/** Record when the user views a stash entry after checking out a branch */
public recordStashViewedAfterCheckout(): Promise<void> {
return this.updateDailyMeasures(m => ({
stashViewedAfterCheckoutCount: m.stashViewedAfterCheckoutCount + 1,
}))
}
/** Record when the user **doesn't** view a stash entry after checking out a branch */
public recordStashNotViewedAfterCheckout(): Promise<void> {
return this.updateDailyMeasures(m => ({
stashNotViewedAfterCheckoutCount: m.stashNotViewedAfterCheckoutCount + 1,
}))
}
/** Record when the user elects to take changes to new branch over stashing */
public recordChangesTakenToNewBranch(): Promise<void> {
return this.updateDailyMeasures(m => ({
changesTakenToNewBranchCount: m.changesTakenToNewBranchCount + 1,
}))
}
/** Record when the user elects to stash changes on the current branch */
public recordStashCreatedOnCurrentBranch(): Promise<void> {
return this.updateDailyMeasures(m => ({
stashCreatedOnCurrentBranchCount: m.stashCreatedOnCurrentBranchCount + 1,
}))
}
/** Record when the user discards a stash entry */
public recordStashDiscard(): Promise<void> {
return this.updateDailyMeasures(m => ({
stashDiscardCount: m.stashDiscardCount + 1,
}))
}
/** Record when the user views a stash entry */
public recordStashView(): Promise<void> {
return this.updateDailyMeasures(m => ({
stashViewCount: m.stashViewCount + 1,
}))
}
/** Record when the user restores a stash entry */
public recordStashRestore(): Promise<void> {
return this.updateDailyMeasures(m => ({
stashRestoreCount: m.stashRestoreCount + 1,
}))
}
/** Record when the user takes no action on the stash entry */
public recordNoActionTakenOnStash(): Promise<void> {
return this.updateDailyMeasures(m => ({
noActionTakenOnStashCount: m.noActionTakenOnStashCount + 1,
}))
}
/** Record the number of stash entries created outside of Desktop for the day */
public addStashEntriesCreatedOutsideDesktop(
stashCount: number
): Promise<void> {
return this.updateDailyMeasures(m => ({
stashEntriesCreatedOutsideDesktop:
m.stashEntriesCreatedOutsideDesktop + stashCount,
}))
}
/**
* Record the number of times the user experiences the error
* "Some of your changes would be overwritten" when switching branches
*/
public recordErrorWhenSwitchingBranchesWithUncommmittedChanges(): Promise<
void
> {
return this.updateDailyMeasures(m => ({
errorWhenSwitchingBranchesWithUncommmittedChanges:
m.errorWhenSwitchingBranchesWithUncommmittedChanges + 1,
}))
}
/**
* Increment the number of times the user has opened their external editor
* from the suggested next steps view
*/
public recordSuggestedStepOpenInExternalEditor(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepOpenInExternalEditor:
m.suggestedStepOpenInExternalEditor + 1,
}))
}
/**
* Increment the number of times the user has opened their repository in
* Finder/Explorer from the suggested next steps view
*/
public recordSuggestedStepOpenWorkingDirectory(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepOpenWorkingDirectory:
m.suggestedStepOpenWorkingDirectory + 1,
}))
}
/**
* Increment the number of times the user has opened their repository on
* GitHub from the suggested next steps view
*/
public recordSuggestedStepViewOnGitHub(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepViewOnGitHub: m.suggestedStepViewOnGitHub + 1,
}))
}
/**
* Increment the number of times the user has used the publish repository
* action from the suggested next steps view
*/
public recordSuggestedStepPublishRepository(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepPublishRepository: m.suggestedStepPublishRepository + 1,
}))
}
/**
* Increment the number of times the user has used the publish branch
* action from the suggested next steps view
*/
public recordSuggestedStepPublishBranch(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepPublishBranch: m.suggestedStepPublishBranch + 1,
}))
}
/**
* Increment the number of times the user has used the Create PR suggestion
* in the suggested next steps view.
*/
public recordSuggestedStepCreatePullRequest(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepCreatePullRequest: m.suggestedStepCreatePullRequest + 1,
}))
}
/**
* Increment the number of times the user has used the View Stash suggestion
* in the suggested next steps view.
*/
public recordSuggestedStepViewStash(): Promise<void> {
return this.updateDailyMeasures(m => ({
suggestedStepViewStash: m.suggestedStepViewStash + 1,
}))
}
private onUiActivity = async () => {
this.disableUiActivityMonitoring()
@ -989,6 +1196,87 @@ export class StatsStore implements IStatsStore {
}))
}
/**
* Onboarding tutorial metrics
*/
public recordTutorialStarted() {
return this.updateDailyMeasures(() => ({
tutorialStarted: true,
}))
}
public recordTutorialRepoCreated() {
return this.updateDailyMeasures(() => ({
tutorialRepoCreated: true,
}))
}
public recordTutorialEditorInstalled() {
return this.updateDailyMeasures(() => ({
tutorialEditorInstalled: true,
}))
}
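// Note: the step-specific methods below (branch created through PR created)
// also mark the preceding tutorial steps as completed, since reaching a later
// step implies the earlier ones.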
public recordTutorialBranchCreated() {
return this.updateDailyMeasures(() => ({
tutorialEditorInstalled: true,
tutorialBranchCreated: true,
}))
}
public recordTutorialFileEdited() {
return this.updateDailyMeasures(() => ({
tutorialEditorInstalled: true,
tutorialBranchCreated: true,
tutorialFileEdited: true,
}))
}
public recordTutorialCommitCreated() {
return this.updateDailyMeasures(() => ({
tutorialEditorInstalled: true,
tutorialBranchCreated: true,
tutorialFileEdited: true,
tutorialCommitCreated: true,
}))
}
public recordTutorialBranchPushed() {
return this.updateDailyMeasures(() => ({
tutorialEditorInstalled: true,
tutorialBranchCreated: true,
tutorialFileEdited: true,
tutorialCommitCreated: true,
tutorialBranchPushed: true,
}))
}
public recordTutorialPrCreated() {
return this.updateDailyMeasures(() => ({
tutorialEditorInstalled: true,
tutorialBranchCreated: true,
tutorialFileEdited: true,
tutorialCommitCreated: true,
tutorialBranchPushed: true,
tutorialPrCreated: true,
}))
}
public recordTutorialCompleted() {
return this.updateDailyMeasures(() => ({
tutorialCompleted: true,
}))
}
public recordHighestTutorialStepCompleted(step: number) {
return this.updateDailyMeasures(m => ({
highestTutorialStepCompleted: Math.max(
step,
m.highestTutorialStepCompleted
),
}))
}
/** Post some data to our stats endpoint. */
private post(body: object): Promise<Response> {
const options: RequestInit = {

View file

@ -6,6 +6,7 @@ import {
isConflictWithMarkers,
GitStatusEntry,
isConflictedFileStatus,
WorkingDirectoryFileChange,
} from '../models/status'
import { assertNever } from './fatal-error'
import {
@ -135,6 +136,15 @@ export function getUnmergedFiles(status: WorkingDirectoryStatus) {
return status.files.filter(f => isConflictedFile(f.status))
}
/** Filter working directory changes for untracked files */
export function getUntrackedFiles(
workingDirectoryStatus: WorkingDirectoryStatus
): ReadonlyArray<WorkingDirectoryFileChange> {
return workingDirectoryStatus.files.filter(
file => file.status.kind === AppFileStatusKind.Untracked
)
}
/** Filter working directory changes for resolved files */
export function getResolvedFiles(
status: WorkingDirectoryStatus,

View file

@ -75,22 +75,12 @@ export class AccountsStore extends TypedBaseStore<ReadonlyArray<Account>> {
/**
* Add the account to the store.
*/
public async addAccount(account: Account): Promise<void> {
public async addAccount(account: Account): Promise<Account | null> {
await this.loadingPromise
let updated = account
try {
updated = await updatedAccount(account)
} catch (e) {
log.warn(`Failed to fetch user ${account.login}`, e)
}
try {
await this.secureStore.setItem(
getKeyForAccount(updated),
updated.login,
updated.token
)
const key = getKeyForAccount(account)
await this.secureStore.setItem(key, account.login, account.token)
} catch (e) {
log.error(`Error adding account '${account.login}'`, e)
@ -103,12 +93,19 @@ export class AccountsStore extends TypedBaseStore<ReadonlyArray<Account>> {
} else {
this.emitError(e)
}
return
return null
}
this.accounts = [...this.accounts, updated]
const accountsByEndpoint = this.accounts.reduce(
(map, x) => map.set(x.endpoint, x),
new Map<string, Account>()
)
accountsByEndpoint.set(account.endpoint, account)
this.accounts = [...accountsByEndpoint.values()]
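// Note: keying by endpoint keeps at most one account per endpoint, so adding
// an account for an endpoint that already has one replaces the existing entry.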
this.save()
return account
}
/** Refresh all accounts by fetching their latest info from the API. */
@ -158,7 +155,9 @@ export class AccountsStore extends TypedBaseStore<ReadonlyArray<Account>> {
return
}
this.accounts = this.accounts.filter(a => a.id !== account.id)
this.accounts = this.accounts.filter(
a => !(a.endpoint === account.endpoint && a.id === account.id)
)
this.save()
}

View file

@ -171,15 +171,16 @@ export class ApiRepositoriesStore extends BaseStore {
* the provided account has explicit permissions to access.
*/
public async loadRepositories(account: Account) {
const existingRepositories = this.accountState.get(account)
const existingAccount = resolveAccount(account, this.accountState)
const existingRepositories = this.accountState.get(existingAccount)
if (existingRepositories !== undefined && existingRepositories.loading) {
return
}
this.updateAccount(account, { loading: true })
this.updateAccount(existingAccount, { loading: true })
const api = API.fromAccount(account)
const api = API.fromAccount(existingAccount)
const repositories = await api.fetchRepositories()
if (repositories === null) {

File diff suppressed because it is too large

View file

@ -14,7 +14,7 @@ import {
} from '../../models/branch'
import { Tip, TipState } from '../../models/tip'
import { Commit } from '../../models/commit'
import { IRemote } from '../../models/remote'
import { IRemote, ForkedRemotePrefix } from '../../models/remote'
import { IFetchProgress, IRevertProgress } from '../../models/progress'
import {
ICommitMessage,
@ -63,6 +63,7 @@ import {
revSymmetricDifference,
getSymbolicRef,
getConfigValue,
removeRemote,
} from '../git'
import { RetryAction, RetryActionType } from '../../models/retry-actions'
import { UpstreamAlreadyExistsError } from './upstream-already-exists-error'
@ -77,7 +78,10 @@ import { formatCommitMessage } from '../format-commit-message'
import { GitAuthor } from '../../models/git-author'
import { IGitAccount } from '../../models/git-account'
import { BaseStore } from './base-store'
import { enablePullWithRebase } from '../feature-flag'
import { enableStashing } from '../feature-flag'
import { getStashes, getStashedFiles } from '../git/stash'
import { IStashEntry, StashedChangesLoadStates } from '../../models/stash-entry'
import { PullRequest } from '../../models/pull-request'
/** The number of commits to load from history per batch. */
const CommitBatchSize = 100
@ -128,6 +132,10 @@ export class GitStore extends BaseStore {
private _lastFetched: Date | null = null
private _desktopStashEntries = new Map<string, IStashEntry>()
private _stashEntryCount = 0
public constructor(repository: Repository, shell: IAppShell) {
super()
@ -270,11 +278,7 @@ export class GitStore extends BaseStore {
this.refreshDefaultBranch()
this.refreshRecentBranches(recentBranchNames)
// no need to query Git config if this isn't displayed in the UI
if (enablePullWithRebase()) {
this.checkPullWithRebase()
}
this.checkPullWithRebase()
const commits = this._allBranches.map(b => b.tip)
@ -967,6 +971,107 @@ export class GitStore extends BaseStore {
throw new Error(`Could not load commit: '${sha}'`)
}
/**
* Refreshes the list of GitHub Desktop created stash entries for the repository
*/
public async loadStashEntries(): Promise<void> {
if (!enableStashing()) {
return
}
const map = new Map<string, IStashEntry>()
const stash = await getStashes(this.repository)
for (const entry of stash.desktopEntries) {
// we only want the first entry we find for each branch,
// so we skip all subsequent ones
if (!map.has(entry.branchName)) {
const existing = this._desktopStashEntries.get(entry.branchName)
// If we've already loaded the files for this stash there's
// no point in us doing it again. We know the contents haven't
// changed since the SHA is the same.
if (existing !== undefined && existing.stashSha === entry.stashSha) {
map.set(entry.branchName, { ...entry, files: existing.files })
} else {
map.set(entry.branchName, entry)
}
}
}
this._desktopStashEntries = map
this._stashEntryCount = stash.stashEntryCount
this.emitUpdate()
this.loadFilesForCurrentStashEntry()
}
/**
* The GitHub Desktop created stash entry for the current branch, or
* null if no entry exists
*/
public get currentBranchStashEntry() {
return this._tip && this._tip.kind === TipState.Valid
? this._desktopStashEntries.get(this._tip.branch.name) || null
: null
}
/** The total number of stash entries */
public get stashEntryCount(): number {
return this._stashEntryCount
}
/** The number of stash entries created by Desktop */
public get desktopStashEntryCount(): number {
return this._desktopStashEntries.size
}
/**
* Updates the latest stash entry with a list of files that it changes
*/
private async loadFilesForCurrentStashEntry() {
if (!enableStashing()) {
return
}
const stashEntry = this.currentBranchStashEntry
if (
!stashEntry ||
stashEntry.files.kind !== StashedChangesLoadStates.NotLoaded
) {
return
}
const { branchName } = stashEntry
this._desktopStashEntries.set(branchName, {
...stashEntry,
files: { kind: StashedChangesLoadStates.Loading },
})
this.emitUpdate()
const files = await getStashedFiles(this.repository, stashEntry.stashSha)
// It's possible that we've refreshed the list of stash entries since we
// started getStashedFiles. Load the latest entry for the branch and make
// sure the SHAs match up.
const currentEntry = this._desktopStashEntries.get(branchName)
if (!currentEntry || currentEntry.stashSha !== stashEntry.stashSha) {
return
}
this._desktopStashEntries.set(branchName, {
...currentEntry,
files: {
kind: StashedChangesLoadStates.Loaded,
files,
},
})
this.emitUpdate()
}
public async loadRemotes(): Promise<void> {
const remotes = await getRemotes(this.repository)
this._defaultRemote = findDefaultRemote(remotes)
@ -1334,4 +1439,21 @@ export class GitStore extends BaseStore {
behind: aheadBehind.behind,
}
}
public async pruneForkedRemotes(openPRs: ReadonlyArray<PullRequest>) {
const remotes = await getRemotes(this.repository)
const prRemotes = new Set<string>()
for (const pr of openPRs) {
if (pr.head.gitHubRepository.cloneURL !== null) {
prRemotes.add(pr.head.gitHubRepository.cloneURL)
}
}
for (const r of remotes) {
if (r.name.startsWith(ForkedRemotePrefix) && !prRemotes.has(r.url)) {
await removeRemote(this.repository, r.name)
}
}
}
}

View file

@ -4,11 +4,11 @@ import { Branch } from '../../../models/branch'
import { GitStoreCache } from '../git-store-cache'
import {
getMergedBranches,
getCheckoutsAfterDate,
getBranchCheckouts,
getSymbolicRef,
IMergedBranch,
formatAsLocalRef,
deleteLocalBranch,
getBranches,
} from '../../git'
import { fatalError } from '../../fatal-error'
import { RepositoryStateCache } from '../repository-state-cache'
@ -28,6 +28,31 @@ const ReservedRefs = [
'refs/heads/release',
]
/**
* Behavior flags for the branch prune execution, to aid with testing and
* verifying locally.
*/
type PruneRuntimeOptions = {
/**
* By default the branch pruner will only run every 24 hours
*
* Set this flag to `false` to ignore this check.
*/
readonly enforcePruneThreshold: boolean
/**
* By default the branch pruner will also delete the branches it believes can
* be pruned safely.
*
* Set this to `false` to keep these in your repository.
*/
readonly deleteBranch: boolean
}
const DefaultPruneOptions: PruneRuntimeOptions = {
enforcePruneThreshold: true,
deleteBranch: true,
}
export class BranchPruner {
private timer: number | null = null
@ -48,9 +73,9 @@ export class BranchPruner {
)
}
await this.pruneLocalBranches()
await this.pruneLocalBranches(DefaultPruneOptions)
this.timer = window.setInterval(
() => this.pruneLocalBranches(),
() => this.pruneLocalBranches(DefaultPruneOptions),
BackgroundPruneMinimumInterval
)
}
@ -64,31 +89,47 @@ export class BranchPruner {
this.timer = null
}
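/**
* Run a prune pass immediately, ignoring the 24 hour threshold and without
* deleting any branches (candidates are only logged). Intended for verifying
* the prune behaviour locally.
*/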
public async testPrune(): Promise<void> {
return this.pruneLocalBranches({
enforcePruneThreshold: false,
deleteBranch: false,
})
}
/** @returns a map of canonical refs to their shas */
private async findBranchesMergedIntoDefaultBranch(
repository: Repository,
defaultBranch: Branch
): Promise<ReadonlyArray<IMergedBranch>> {
): Promise<ReadonlyMap<string, string>> {
const gitStore = this.gitStoreCache.get(repository)
const mergedBranches = await gitStore.performFailableOperation(() =>
getMergedBranches(repository, defaultBranch.name)
)
if (mergedBranches === undefined) {
return []
return new Map<string, string>()
}
const currentBranchCanonicalRef = await getSymbolicRef(repository, 'HEAD')
// remove the current branch
return currentBranchCanonicalRef === null
? mergedBranches
: mergedBranches.filter(
mb => mb.canonicalRef !== currentBranchCanonicalRef
)
if (currentBranchCanonicalRef) {
mergedBranches.delete(currentBranchCanonicalRef)
}
return mergedBranches
}
private async pruneLocalBranches(): Promise<void> {
if (this.repository.gitHubRepository === null) {
/**
* Prune the local branches for the repository
*
* @param options configure the behaviour of the branch pruning process
*/
private async pruneLocalBranches(
options: PruneRuntimeOptions
): Promise<void> {
const { gitHubRepository } = this.repository
if (gitHubRepository === null) {
return
}
@ -103,18 +144,28 @@ export class BranchPruner {
// Using type coalescing behavior to deal with Dexie returning `undefined`
// for records that haven't been updated with the new field yet
if (lastPruneDate != null && threshold.isBefore(lastPruneDate)) {
if (
options.enforcePruneThreshold &&
lastPruneDate != null &&
threshold.isBefore(lastPruneDate)
) {
log.info(
`Last prune took place ${moment(lastPruneDate).from(
`[BranchPruner] Last prune took place ${moment(lastPruneDate).from(
dateNow
)} - skipping`
)
return
}
// update the last prune date first thing after we check it!
await this.repositoriesStore.updateLastPruneDate(
this.repository,
Date.now()
)
// Get list of branches that have been merged
const { branchesState } = this.repositoriesStateCache.get(this.repository)
const { defaultBranch } = branchesState
const { defaultBranch, allBranches } = branchesState
if (defaultBranch === null) {
return
@ -125,8 +176,8 @@ export class BranchPruner {
defaultBranch
)
if (mergedBranches.length === 0) {
log.info('No branches to prune.')
if (mergedBranches.size === 0) {
log.info('[BranchPruner] No branches to prune.')
return
}
@ -134,7 +185,7 @@ export class BranchPruner {
const twoWeeksAgo = moment()
.subtract(2, 'weeks')
.toDate()
const recentlyCheckedOutBranches = await getCheckoutsAfterDate(
const recentlyCheckedOutBranches = await getBranchCheckouts(
this.repository,
twoWeeksAgo
)
@ -142,17 +193,31 @@ export class BranchPruner {
[...recentlyCheckedOutBranches.keys()].map(formatAsLocalRef)
)
// Create array of branches that can be pruned
const candidateBranches = mergedBranches.filter(
mb => !ReservedRefs.includes(mb.canonicalRef)
)
// get the locally cached branches of remotes (ie `remotes/origin/master`)
const remoteBranches = (await getBranches(
this.repository,
`refs/remotes/`
)).map(b => formatAsLocalRef(b.name))
const branchesReadyForPruning = candidateBranches.filter(
mb => !recentlyCheckedOutCanonicalRefs.has(mb.canonicalRef)
// create list of branches to be pruned
const branchesReadyForPruning = Array.from(mergedBranches.keys()).filter(
ref => {
if (ReservedRefs.includes(ref)) {
return false
}
if (recentlyCheckedOutCanonicalRefs.has(ref)) {
return false
}
const upstreamRef = getUpstreamRefForLocalBranchRef(ref, allBranches)
if (upstreamRef === undefined) {
return false
}
return !remoteBranches.includes(upstreamRef)
}
)
log.info(
`Pruning ${
`[BranchPruner] Pruning ${
branchesReadyForPruning.length
} branches that have been merged into the default branch, ${
defaultBranch.name
@ -162,26 +227,51 @@ export class BranchPruner {
const gitStore = this.gitStoreCache.get(this.repository)
const branchRefPrefix = `refs/heads/`
for (const branch of branchesReadyForPruning) {
if (!branch.canonicalRef.startsWith(branchRefPrefix)) {
for (const branchCanonicalRef of branchesReadyForPruning) {
if (!branchCanonicalRef.startsWith(branchRefPrefix)) {
continue
}
const branchName = branch.canonicalRef.substr(branchRefPrefix.length)
const branchName = branchCanonicalRef.substr(branchRefPrefix.length)
const isDeleted = await gitStore.performFailableOperation(() =>
deleteLocalBranch(this.repository, branchName)
)
if (options.deleteBranch) {
const isDeleted = await gitStore.performFailableOperation(() =>
deleteLocalBranch(this.repository, branchName)
)
if (isDeleted) {
log.info(`Pruned branch ${branchName} (was ${branch.sha})`)
if (isDeleted) {
log.info(
`[BranchPruner] Pruned branch ${branchName} (was ${mergedBranches.get(
branchCanonicalRef
)})`
)
}
} else {
log.info(`[BranchPruner] Branch '${branchName}' marked for deletion`)
}
}
await this.repositoriesStore.updateLastPruneDate(
this.repository,
Date.now()
)
this.onPruneCompleted(this.repository)
}
}
/**
* @param ref the canonical ref for a local branch
* @param allBranches a list of all branches in the Repository model
* @returns the canonical upstream branch ref or undefined if upstream can't be reliably determined
*/
function getUpstreamRefForLocalBranchRef(
ref: string,
allBranches: ReadonlyArray<Branch>
): string | undefined {
const branch = allBranches.find(b => formatAsLocalRef(b.name) === ref)
// if we can't find a branch model, we can't determine the ref's upstream
if (branch === undefined) {
return undefined
}
const { upstream } = branch
// if there's no upstream in the branch, there's nothing to lookup
if (upstream === null) {
return undefined
}
return formatAsLocalRef(upstream)
}
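A rough, self-contained sketch of the pruning filter above (not part of the diff; the `MiniBranch` type, `shouldPrune` helper, and sample refs are made up for illustration, while the real code uses the `Branch` model, `formatAsLocalRef`, and the git helpers shown in the hunk):
interface MiniBranch {
  readonly name: string
  readonly upstream: string | null
}

const formatRef = (name: string) => `refs/heads/${name}`

function shouldPrune(
  ref: string,
  allBranches: ReadonlyArray<MiniBranch>,
  reservedRefs: ReadonlyArray<string>,
  recentlyCheckedOut: ReadonlySet<string>,
  remoteBranches: ReadonlyArray<string>
): boolean {
  // never prune reserved refs or recently checked out branches
  if (reservedRefs.includes(ref) || recentlyCheckedOut.has(ref)) {
    return false
  }
  // we need a branch model with a known upstream to decide anything
  const branch = allBranches.find(b => formatRef(b.name) === ref)
  if (branch === undefined || branch.upstream === null) {
    return false
  }
  // prune only when the upstream no longer exists on the remote
  return !remoteBranches.includes(formatRef(branch.upstream))
}

// 'old-work' was merged and its upstream is gone from origin, so it qualifies
console.log(
  shouldPrune(
    'refs/heads/old-work',
    [{ name: 'old-work', upstream: 'origin/old-work' }],
    ['refs/heads/master'],
    new Set(['refs/heads/current']),
    ['refs/heads/origin/other']
  )
) // true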

View file

@ -0,0 +1,33 @@
import { Tip, TipState } from '../../../models/tip'
import { IRemote } from '../../../models/remote'
import { GitHubRepository } from '../../../models/github-repository'
import { urlMatchesCloneURL } from '../../repository-matching'
/**
* Function to determine which branch name to use when looking for branch
* protection information.
*
* If the remote branch matches the current `githubRepository` associated with
* the repository, this will be used. Otherwise we will fall back to using the
* branch name as that's a reasonable approximation for what would happen if the
* user tries to push the new branch.
*/
export function findRemoteBranchName(
tip: Tip,
remote: IRemote | null,
gitHubRepository: GitHubRepository
): string | null {
if (tip.kind !== TipState.Valid) {
return null
}
if (
tip.branch.upstreamWithoutRemote !== null &&
remote !== null &&
urlMatchesCloneURL(remote.url, gitHubRepository)
) {
return tip.branch.upstreamWithoutRemote
}
return tip.branch.nameWithoutRemote
}
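A simplified, self-contained sketch of the decision above (the helper name and URLs are made up, and the real code compares remotes via `urlMatchesCloneURL` rather than strict string equality):
function remoteBranchNameFor(
  branchName: string,
  upstreamWithoutRemote: string | null,
  remoteUrl: string | null,
  repositoryCloneUrl: string
): string {
  if (
    upstreamWithoutRemote !== null &&
    remoteUrl !== null &&
    remoteUrl === repositoryCloneUrl
  ) {
    // the branch already tracks a branch on the repository we have
    // protection information for, so use that name
    return upstreamWithoutRemote
  }
  // otherwise assume pushing would create a branch with the local name
  return branchName
}

console.log(
  remoteBranchNameFor(
    'my-feature',
    'shared-feature',
    'https://github.com/desktop/desktop.git',
    'https://github.com/desktop/desktop.git'
  )
) // 'shared-feature'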

View file

@ -0,0 +1,48 @@
import { IBranchesState } from '../../app-state'
import { eligibleForFastForward, Branch } from '../../../models/branch'
import { TipState } from '../../../models/tip'
/**
* As fast-forwarding local branches is proportional to the number of local
* branches, and is run after every fetch/push/pull, this is skipped when the
* number of eligible branches is greater than a given threshold.
*/
const FastForwardBranchesThreshold = 20
/** Figures out which branches are eligible to fast forward
*
* If the number of eligible branches exceeds `FastForwardBranchesThreshold`,
* returns a shorter list of just the default and recent branches
*
* @param branchesState current branchesState for a repository
* @returns list of branches eligible for fast forward
*/
export function findBranchesForFastForward(
branchesState: IBranchesState
): ReadonlyArray<Branch> {
const { allBranches, tip, defaultBranch, recentBranches } = branchesState
const currentBranchName = tip.kind === TipState.Valid ? tip.branch.name : null
const allEligibleBranches = allBranches.filter(b =>
eligibleForFastForward(b, currentBranchName)
)
if (allEligibleBranches.length < FastForwardBranchesThreshold) {
return allEligibleBranches
}
log.info(
`skipping fast-forward for all branches as there are ${
allEligibleBranches.length
} eligible branches (Threshold is ${FastForwardBranchesThreshold} eligible branches).`
)
// we don't have to worry about this being a duplicate, because recent branches
// never include the default branch (at least right now)
const shortListBranches =
defaultBranch !== null ? [...recentBranches, defaultBranch] : recentBranches
const eligibleShortListBranches = shortListBranches.filter(b =>
eligibleForFastForward(b, currentBranchName)
)
return eligibleShortListBranches
}
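A self-contained sketch of the threshold fallback above (the branch names and counts are made up; the real code filters `Branch` models with `eligibleForFastForward`):
const Threshold = 20

function branchesToFastForward(
  eligible: ReadonlyArray<string>,
  recent: ReadonlyArray<string>,
  defaultBranch: string | null
): ReadonlyArray<string> {
  if (eligible.length < Threshold) {
    return eligible
  }
  // too many branches: only consider recent branches and the default branch
  const shortList =
    defaultBranch !== null ? [...recent, defaultBranch] : recent
  return shortList.filter(name => eligible.includes(name))
}

const allEligible = Array.from({ length: 25 }, (_, i) => `feature-${i}`)
console.log(branchesToFastForward(allEligible, ['feature-3'], 'master'))
// [ 'feature-3' ] (master isn't eligible in this example)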

View file

@ -1,65 +1,80 @@
import { PullRequestStore } from '../pull-request-store'
import { Account } from '../../../models/account'
import { fatalError } from '../../fatal-error'
import { Repository } from '../../../models/repository'
import { GitHubRepository } from '../../../models/github-repository'
/** Interval to check for pull requests */
const PullRequestInterval = 1000 * 60 * 10
enum TimeoutHandles {
PullRequest = 'PullRequestHandle',
Status = 'StatusHandle',
PushedPullRequest = 'PushedPullRequestHandle',
}
/** Check for new or updated pull requests every 30 minutes */
const PullRequestInterval = 30 * 60 * 1000
/**
* Acts as a service for downloading the latest pull request
* and status info from GitHub.
* Never check for new or updated pull requests more
* frequently than every 2 minutes
*/
const MaxPullRequestRefreshFrequency = 2 * 60 * 1000
/**
* Periodically requests a refresh of the list of open pull requests
* for a particular GitHub repository. The intention is for the
* updater to only run when the app is in focus. When the updater
* is started (in other words when the app is focused) it will
* refresh the list of open pull requests as soon as possible while
* ensuring that we never update more frequently than the value
* indicated by the `MaxPullRequestRefreshFrequency` variable.
*/
export class PullRequestUpdater {
private readonly repository: Repository
private readonly account: Account
private readonly store: PullRequestStore
private readonly timeoutHandles = new Map<TimeoutHandles, number>()
private isStopped: boolean = true
private timeoutId: number | null = null
private running = false
public constructor(
repository: Repository,
account: Account,
pullRequestStore: PullRequestStore
) {
this.repository = repository
this.account = account
this.store = pullRequestStore
}
private readonly repository: GitHubRepository,
private readonly account: Account,
private readonly store: PullRequestStore
) {}
/** Starts the updater */
public start() {
if (!this.isStopped) {
fatalError(
'Cannot start the Pull Request Updater that is already running.'
)
if (!this.running) {
this.running = true
this.scheduleTick(MaxPullRequestRefreshFrequency)
}
}
private getTimeSinceLastRefresh() {
const lastRefreshed = this.store.getLastRefreshed(this.repository)
const timeSince =
lastRefreshed === undefined ? Infinity : Date.now() - lastRefreshed
return timeSince
}
private scheduleTick(timeout: number = PullRequestInterval) {
if (this.running) {
const due = Math.max(timeout - this.getTimeSinceLastRefresh(), 0)
this.timeoutId = window.setTimeout(() => this.tick(), due)
}
}
private tick() {
if (!this.running) {
return
}
this.timeoutHandles.set(
TimeoutHandles.PullRequest,
this.timeoutId = null
if (this.getTimeSinceLastRefresh() < MaxPullRequestRefreshFrequency) {
this.scheduleTick()
}
window.setTimeout(() => {
this.store.fetchAndCachePullRequests(this.repository, this.account)
}, PullRequestInterval)
)
this.store
.refreshPullRequests(this.repository, this.account)
.catch(() => {})
.then(() => this.scheduleTick())
}
public stop() {
this.isStopped = true
for (const timeoutHandle of this.timeoutHandles.values()) {
window.clearTimeout(timeoutHandle)
if (this.running) {
if (this.timeoutId !== null) {
window.clearTimeout(this.timeoutId)
this.timeoutId = null
}
this.running = false
}
this.timeoutHandles.clear()
}
}
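A self-contained sketch of the scheduling math the updater uses: the next tick is due after the timeout minus the time since the last refresh, clamped at zero. The constants mirror the ones above; the timestamps are made up.
const MaxPullRequestRefreshFrequency = 2 * 60 * 1000
const PullRequestInterval = 30 * 60 * 1000

function nextTickDelay(timeout: number, lastRefreshedAt?: number): number {
  const timeSince =
    lastRefreshedAt === undefined ? Infinity : Date.now() - lastRefreshedAt
  return Math.max(timeout - timeSince, 0)
}

// Never refreshed before: the first tick fires immediately.
console.log(nextTickDelay(MaxPullRequestRefreshFrequency)) // 0
// Refreshed five minutes ago: the next regular tick is due in ~25 minutes.
console.log(nextTickDelay(PullRequestInterval, Date.now() - 5 * 60 * 1000))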

View file

@ -0,0 +1,172 @@
import { IRepositoryState } from '../../app-state'
import { TutorialStep } from '../../../models/tutorial-step'
import { TipState } from '../../../models/tip'
import { ExternalEditor } from '../../editors'
import { setBoolean, getBoolean } from '../../local-storage'
const skipInstallEditorKey = 'tutorial-install-editor-skipped'
const pullRequestStepCompleteKey = 'tutorial-pull-request-step-complete'
const tutorialPausedKey = 'tutorial-paused'
/**
* Used to determine which step of the onboarding
* tutorial the user needs to complete next
*
* Stores some state that only it needs to know about. The
* actual step result is stored in App Store so the rest of
* the app can access it.
*/
export class OnboardingTutorialAssessor {
/** Has the user opted to skip the install editor step? */
private installEditorSkipped: boolean = getBoolean(
skipInstallEditorKey,
false
)
/** Has the user completed (or opted to skip) the create pull request step? */
private prStepComplete: boolean = getBoolean(
pullRequestStepCompleteKey,
false
)
/** Is the tutorial currently paused? */
private tutorialPaused: boolean = getBoolean(tutorialPausedKey, false)
public constructor(
/** Method to call when we need to get the current editor */
private getResolvedExternalEditor: () => ExternalEditor | null
) {}
/** Determines what step the user needs to complete next in the Onboarding Tutorial */
public async getCurrentStep(
isTutorialRepo: boolean,
repositoryState: IRepositoryState
): Promise<TutorialStep> {
if (!isTutorialRepo) {
// If a new repo has been added, we can unpause the tutorial repo
// as we will no longer present the no-repos blank slate view resume button
// Fixes https://github.com/desktop/desktop/issues/8341
if (this.tutorialPaused) {
this.resumeTutorial()
}
return TutorialStep.NotApplicable
} else if (this.tutorialPaused) {
return TutorialStep.Paused
} else if (!(await this.isEditorInstalled())) {
return TutorialStep.PickEditor
} else if (!this.isBranchCheckedOut(repositoryState)) {
return TutorialStep.CreateBranch
} else if (!this.hasChangedFile(repositoryState)) {
return TutorialStep.EditFile
} else if (!this.hasMultipleCommits(repositoryState)) {
return TutorialStep.MakeCommit
} else if (!this.commitPushed(repositoryState)) {
return TutorialStep.PushBranch
} else if (!this.pullRequestCreated(repositoryState)) {
return TutorialStep.OpenPullRequest
} else {
return TutorialStep.AllDone
}
}
private async isEditorInstalled(): Promise<boolean> {
return (
this.installEditorSkipped || this.getResolvedExternalEditor() !== null
)
}
private isBranchCheckedOut(repositoryState: IRepositoryState): boolean {
const { branchesState } = repositoryState
const { tip } = branchesState
const currentBranchName =
tip.kind === TipState.Valid ? tip.branch.name : null
const defaultBranchName =
branchesState.defaultBranch !== null
? branchesState.defaultBranch.name
: null
return (
currentBranchName !== null &&
defaultBranchName !== null &&
currentBranchName !== defaultBranchName
)
}
private hasChangedFile(repositoryState: IRepositoryState): boolean {
if (this.hasMultipleCommits(repositoryState)) {
// User has already committed a change
return true
}
const { changesState } = repositoryState
return changesState.workingDirectory.files.length > 0
}
private hasMultipleCommits(repositoryState: IRepositoryState): boolean {
const { branchesState } = repositoryState
const { tip } = branchesState
if (tip.kind === TipState.Valid) {
// For some reason sometimes the initial commit has a parent sha
// listed as an empty string...
// For now I'm filtering those out. Would be better to prevent that from happening
return tip.branch.tip.parentSHAs.some(x => x.length > 0)
}
return false
}
private commitPushed(repositoryState: IRepositoryState): boolean {
const { aheadBehind } = repositoryState
return aheadBehind !== null && aheadBehind.ahead === 0
}
private pullRequestCreated(repositoryState: IRepositoryState): boolean {
// If we see a PR at any point let's persist that. This is for the
// edge case where a user leaves the app to manually create the PR
if (repositoryState.branchesState.currentPullRequest !== null) {
this.markPullRequestTutorialStepAsComplete()
}
return this.prStepComplete
}
/** Call when the user opts to skip the install editor step */
public skipPickEditor = () => {
this.installEditorSkipped = true
setBoolean(skipInstallEditorKey, this.installEditorSkipped)
}
/**
* Call when the user has either created a pull request or opts to
* skip the create pull request step of the onboarding tutorial
*/
public markPullRequestTutorialStepAsComplete = () => {
this.prStepComplete = true
setBoolean(pullRequestStepCompleteKey, this.prStepComplete)
}
/**
* Call when a new tutorial repository is created
*
* (Resets its internal skipped steps state.)
*/
public onNewTutorialRepository = () => {
this.installEditorSkipped = false
localStorage.removeItem(skipInstallEditorKey)
this.prStepComplete = false
localStorage.removeItem(pullRequestStepCompleteKey)
this.tutorialPaused = false
localStorage.removeItem(tutorialPausedKey)
}
/** Call when the user pauses the tutorial */
public pauseTutorial() {
this.tutorialPaused = true
setBoolean(tutorialPausedKey, this.tutorialPaused)
}
/** Call when the user resumes the tutorial */
public resumeTutorial() {
this.tutorialPaused = false
setBoolean(tutorialPausedKey, this.tutorialPaused)
}
}
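A plausible, self-contained sketch of the `getBoolean`/`setBoolean` helpers the assessor persists its flags with. The real implementations live in the app's local-storage module; these bodies are an assumption for illustration only.
function getBoolean(key: string, defaultValue: boolean): boolean {
  // assumed encoding: '1' for true, anything else for false
  const value = localStorage.getItem(key)
  return value === null ? defaultValue : value === '1'
}

function setBoolean(key: string, value: boolean): void {
  localStorage.setItem(key, value ? '1' : '0')
}

// e.g. pausing the tutorial survives an app restart
setBoolean('tutorial-paused', true)
console.log(getBoolean('tutorial-paused', false)) // true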

View file

@ -1,229 +1,334 @@
import { PullRequestDatabase, IPullRequest } from '../databases'
import mem from 'mem'
import {
PullRequestDatabase,
IPullRequest,
PullRequestKey,
getPullRequestKey,
} from '../databases/pull-request-database'
import { GitHubRepository } from '../../models/github-repository'
import { Account } from '../../models/account'
import { API, IAPIPullRequest } from '../api'
import { fatalError, forceUnwrap } from '../fatal-error'
import { API, IAPIPullRequest, MaxResultsError } from '../api'
import { fatalError } from '../fatal-error'
import { RepositoriesStore } from './repositories-store'
import { PullRequest, PullRequestRef } from '../../models/pull-request'
import { TypedBaseStore } from './base-store'
import { Repository } from '../../models/repository'
import { getRemotes, removeRemote } from '../git'
import { IRemote, ForkedRemotePrefix } from '../../models/remote'
const Decrement = (n: number) => n - 1
const Increment = (n: number) => n + 1
import { structuralEquals } from '../equality'
import { Emitter, Disposable } from 'event-kit'
import { APIError } from '../http'
/** The store for GitHub Pull Requests. */
export class PullRequestStore extends TypedBaseStore<GitHubRepository> {
private readonly pullRequestDatabase: PullRequestDatabase
private readonly repositoryStore: RepositoriesStore
private readonly activeFetchCountPerRepository = new Map<number, number>()
export class PullRequestStore {
protected readonly emitter = new Emitter()
private readonly currentRefreshOperations = new Map<number, Promise<void>>()
private readonly lastRefreshForRepository = new Map<number, number>()
public constructor(
db: PullRequestDatabase,
repositoriesStore: RepositoriesStore
) {
super()
private readonly db: PullRequestDatabase,
private readonly repositoryStore: RepositoriesStore
) {}
this.pullRequestDatabase = db
this.repositoryStore = repositoriesStore
private emitPullRequestsChanged(
repository: GitHubRepository,
pullRequests: ReadonlyArray<PullRequest>
) {
this.emitter.emit('onPullRequestsChanged', { repository, pullRequests })
}
/** Register a function to be called when the store updates. */
public onPullRequestsChanged(
fn: (
repository: GitHubRepository,
pullRequests: ReadonlyArray<PullRequest>
) => void
): Disposable {
return this.emitter.on('onPullRequestsChanged', value => {
const { repository, pullRequests } = value
fn(repository, pullRequests)
})
}
private emitIsLoadingPullRequests(
repository: GitHubRepository,
isLoadingPullRequests: boolean
) {
this.emitter.emit('onIsLoadingPullRequest', {
repository,
isLoadingPullRequests,
})
}
/** Register a function to be called when the store updates. */
public onIsLoadingPullRequests(
fn: (repository: GitHubRepository, isLoadingPullRequests: boolean) => void
): Disposable {
return this.emitter.on('onIsLoadingPullRequest', value => {
const { repository, isLoadingPullRequests } = value
fn(repository, isLoadingPullRequests)
})
}
/** Loads all pull requests against the given repository. */
public async fetchAndCachePullRequests(
repository: Repository,
public refreshPullRequests(repo: GitHubRepository, account: Account) {
const dbId = repo.dbID
if (dbId === null) {
// This can happen when the `repositoryWithRefreshedGitHubRepository`
// method in AppStore fails to retrieve API information about the current
// repository either due to the user being signed out or the API failing
// to provide a response. There's nothing for us to do when that happens
// so instead of crashing we'll bail here.
return Promise.resolve()
}
const currentOp = this.currentRefreshOperations.get(dbId)
if (currentOp !== undefined) {
return currentOp
}
this.lastRefreshForRepository.set(dbId, Date.now())
this.emitIsLoadingPullRequests(repo, true)
const promise = this.fetchAndStorePullRequests(repo, account)
.catch(err => {
log.error(`Error refreshing pull requests for '${repo.fullName}'`, err)
})
.then(() => {
this.currentRefreshOperations.delete(dbId)
this.emitIsLoadingPullRequests(repo, false)
})
this.currentRefreshOperations.set(dbId, promise)
return promise
}
/**
* Fetches pull requests from the API (either all open PRs if it's the
* first time fetching for this repository or all updated PRs if not).
*
* Returns a value indicating whether it's safe to avoid
* emitting an event that the store has been updated. In other words, when
* this method returns false it's safe to say that nothing has been changed
* in the pull requests table.
*/
private async fetchAndStorePullRequests(
repo: GitHubRepository,
account: Account
): Promise<void> {
const githubRepo = forceUnwrap(
'Can only refresh pull requests for GitHub repositories',
repository.gitHubRepository
)
const apiClient = API.fromAccount(account)
) {
const api = API.fromAccount(account)
const lastUpdatedAt = await this.db.getLastUpdated(repo)
this.updateActiveFetchCount(githubRepo, Increment)
try {
const apiResult = await apiClient.fetchPullRequests(
githubRepo.owner.login,
githubRepo.name,
'open'
)
await this.cachePullRequests(apiResult, githubRepo)
const prs = await this.fetchPullRequestsFromCache(githubRepo)
await this.pruneForkedRemotes(repository, prs)
this.emitUpdate(githubRepo)
} catch (error) {
log.warn(`Error refreshing pull requests for '${repository.name}'`, error)
} finally {
this.updateActiveFetchCount(githubRepo, Decrement)
// If we don't have a lastUpdatedAt that means we haven't fetched any PRs
// for the repository yet which in turn means we only have to fetch the
// currently open PRs. If we have fetched before we get all PRs
// If we have a lastUpdatedAt that means we have fetched PRs
// for the repository before. In that case we get all PRs
// that have been modified since the last time we fetched so that we
// can prune closed issues from our database. Note that since
// `api.fetchUpdatedPullRequests` returns all issues modified _at_ or
// after the timestamp we give it we will always get at least one issue
// back. See `storePullRequests` for details on how that's handled.
if (!lastUpdatedAt) {
return this.fetchAndStoreOpenPullRequests(api, repo)
} else {
return this.fetchAndStoreUpdatedPullRequests(api, repo, lastUpdatedAt)
}
}
/** Is the store currently fetching the list of open pull requests? */
public isFetchingPullRequests(repository: GitHubRepository): boolean {
const repoDbId = forceUnwrap(
'Cannot fetch PRs for a repository which is not in the database',
repository.dbID
)
const currentCount = this.activeFetchCountPerRepository.get(repoDbId) || 0
return currentCount > 0
}
/** Gets the pull requests against the given repository. */
public async fetchPullRequestsFromCache(
private async fetchAndStoreOpenPullRequests(
api: API,
repository: GitHubRepository
): Promise<ReadonlyArray<PullRequest>> {
const gitHubRepositoryID = repository.dbID
) {
const { name, owner } = getNameWithOwner(repository)
const open = await api.fetchAllOpenPullRequests(owner, name)
await this.storePullRequestsAndEmitUpdate(open, repository)
}
if (gitHubRepositoryID == null) {
return fatalError(
"Cannot get pull requests for a repository that hasn't been inserted into the database!"
private async fetchAndStoreUpdatedPullRequests(
api: API,
repository: GitHubRepository,
lastUpdatedAt: Date
) {
const { name, owner } = getNameWithOwner(repository)
const updated = await api
.fetchUpdatedPullRequests(owner, name, lastUpdatedAt)
.catch(e =>
// We'll bubble up any other error, but these ones we
// can handle, see below.
e instanceof MaxResultsError || e instanceof APIError
? Promise.resolve(null)
: Promise.reject(e)
)
if (updated !== null) {
return await this.storePullRequestsAndEmitUpdate(updated, repository)
} else {
// We can fail to load updated pull requests either because
// there are too many updated PRs since the last time we
// fetched (in which case it's likely much more
// efficient to just load the open PRs) or because the
// API told us we couldn't load PRs (rate limit or permission
// problems). In either case we delete the PRs we've got
// for this repo and attempt to load just the open ones.
//
// This scenario can happen for repositories that are
// very active while simultaneously infrequently used
// by the user. Think of a very active open source repository
// where the user only visits once a year to make a contribution.
// It's likely that there's at most a few hundred PRs open but
// the number of merged PRs since the last time we fetched could
// number in the thousands.
await this.db.deleteAllPullRequestsInRepository(repository)
await this.fetchAndStoreOpenPullRequests(api, repository)
}
}
public getLastRefreshed(repository: GitHubRepository) {
return repository.dbID
? this.lastRefreshForRepository.get(repository.dbID)
: undefined
}
/** Gets all stored pull requests for the given repository. */
public async getAll(repository: GitHubRepository) {
if (repository.dbID === null) {
// This can happen when the `repositoryWithRefreshedGitHubRepository`
// method in AppStore fails to retrieve API information about the current
// repository either due to the user being signed out or the API failing
// to provide a response. There's nothing for us to do when that happens
// so instead of crashing we'll bail here.
return []
}
const records = await this.pullRequestDatabase.pullRequests
.where('base.repoId')
.equals(gitHubRepositoryID)
.reverse()
.sortBy('number')
const records = await this.db.getAllPullRequestsInRepository(repository)
const result = new Array<PullRequest>()
for (const record of records) {
const repositoryDbId = record.head.repoId
let githubRepository: GitHubRepository | null = null
// In order to avoid what would otherwise be a very expensive
// N+1 (N+2 really) query where we look up the head and base
// GitHubRepository from IndexedDB for each pull request we'll memoize
// already retrieved GitHubRepository instances.
//
// This optimization decreased the run time of this method from 6
// seconds to just under 26 ms while testing using an internal
// repository with 1k+ PRs. Even in the worst-case scenario (i.e.
// a repository with a huge number of open PRs from forks) this
// will reduce the N+2 to N+1.
const store = this.repositoryStore
const getRepo = mem(store.findGitHubRepositoryByID.bind(store))
if (repositoryDbId != null) {
githubRepository = await this.repositoryStore.findGitHubRepositoryByID(
repositoryDbId
)
for (const record of records) {
const headRepository = await getRepo(record.head.repoId)
const baseRepository = await getRepo(record.base.repoId)
if (headRepository === null) {
return fatalError("head repository can't be null")
}
// We know the base repo ID can't be null since it's the repository we
// fetched the PR from in the first place.
const parentRepositoryDbId = forceUnwrap(
'A pull request cannot have a null base repo id',
record.base.repoId
)
const parentGitGubRepository: GitHubRepository | null = await this.repositoryStore.findGitHubRepositoryByID(
parentRepositoryDbId
)
const parentGitHubRepository = forceUnwrap(
'PR cannot have a null base repo',
parentGitGubRepository
)
// We can be certain the PR ID is valid since we just got it from the
// database.
const pullRequestDbId = forceUnwrap(
'PR cannot have a null ID after being retrieved from the database',
record.id
)
if (baseRepository === null) {
return fatalError("base repository can't be null")
}
result.push(
new PullRequest(
pullRequestDbId,
new Date(record.createdAt),
record.title,
record.number,
new PullRequestRef(
record.head.ref,
record.head.sha,
githubRepository
),
new PullRequestRef(
record.base.ref,
record.base.sha,
parentGitHubRepository
),
new PullRequestRef(record.head.ref, record.head.sha, headRepository),
new PullRequestRef(record.base.ref, record.base.sha, baseRepository),
record.author
)
)
}
return result
// Reversing the results in place manually instead of using
// .reverse on the IndexedDB query has been measured to have favorable
// performance characteristics for repositories with a lot of pull
// requests since it means Dexie is able to leverage the IndexedDB
// getAll method as opposed to creating a reverse cursor. Reversing
// in place versus unshifting is also dramatically more performant.
return result.reverse()
}
private async pruneForkedRemotes(
repository: Repository,
pullRequests: ReadonlyArray<PullRequest>
) {
const remotes = await getRemotes(repository)
const forkedRemotesToDelete = this.getRemotesToDelete(remotes, pullRequests)
await this.deleteRemotes(repository, forkedRemotesToDelete)
}
private getRemotesToDelete(
remotes: ReadonlyArray<IRemote>,
openPullRequests: ReadonlyArray<PullRequest>
): ReadonlyArray<IRemote> {
const forkedRemotes = remotes.filter(remote =>
remote.name.startsWith(ForkedRemotePrefix)
)
const remotesOfPullRequests = new Set<string>()
openPullRequests.forEach(pr => {
const { gitHubRepository } = pr.head
if (gitHubRepository != null && gitHubRepository.cloneURL != null) {
remotesOfPullRequests.add(gitHubRepository.cloneURL)
}
})
const result = forkedRemotes.filter(
forkedRemote => !remotesOfPullRequests.has(forkedRemote.url)
)
return result
}
private async deleteRemotes(
repository: Repository,
remotes: ReadonlyArray<IRemote>
) {
const promises: Array<Promise<void>> = []
remotes.forEach(r => promises.push(removeRemote(repository, r.name)))
await Promise.all(promises)
}
private updateActiveFetchCount(
repository: GitHubRepository,
update: (count: number) => number
) {
const repoDbId = forceUnwrap(
'Cannot fetch PRs for a repository which is not in the database',
repository.dbID
)
const currentCount = this.activeFetchCountPerRepository.get(repoDbId) || 0
const newCount = update(currentCount)
this.activeFetchCountPerRepository.set(repoDbId, newCount)
this.emitUpdate(repository)
}
private async cachePullRequests(
/**
* Stores all pull requests that are open and deletes all that are merged
* or closed. Returns a value indicating whether an update notification
* has been emitted, see `storePullRequests` for more details.
*/
private async storePullRequestsAndEmitUpdate(
pullRequestsFromAPI: ReadonlyArray<IAPIPullRequest>,
repository: GitHubRepository
): Promise<void> {
const repoDbId = repository.dbID
) {
if (await this.storePullRequests(pullRequestsFromAPI, repository)) {
this.emitPullRequestsChanged(repository, await this.getAll(repository))
}
}
if (repoDbId == null) {
return fatalError(
"Cannot store pull requests for a repository that hasn't been inserted into the database!"
)
/**
* Stores all pull requests that are open and deletes all that are merged
* or closed. Returns a value indicating whether anything was changed in the
* pull requests table (and thus whether an update event needs to be
* emitted). In other words, when this method returns false it's safe to say
* that nothing has been changed in the pull requests table.
*/
private async storePullRequests(
pullRequestsFromAPI: ReadonlyArray<IAPIPullRequest>,
repository: GitHubRepository
) {
if (pullRequestsFromAPI.length === 0) {
return false
}
const table = this.pullRequestDatabase.pullRequests
const prsToInsert = new Array<IPullRequest>()
let mostRecentlyUpdated = pullRequestsFromAPI[0].updated_at
const prsToDelete = new Array<PullRequestKey>()
const prsToUpsert = new Array<IPullRequest>()
// The API endpoint for this PR, i.e api.github.com or a GHE url
const { endpoint } = repository
const store = this.repositoryStore
// Upsert will always query the database for a repository. Given that
// we've received these repositories in a batch response from the API
// it's pretty unlikely that they'd differ between PRs so we're going
// to use the upsert just to ensure that the repo exists in the database
// and reuse the same object without going to the database for all that
// follow.
const upsertRepo = mem(store.upsertGitHubRepository.bind(store), {
// The first argument which we're ignoring here is the endpoint
// which is constant throughout the lifetime of this function.
// The second argument is an `IAPIRepository` which is basically
// the raw object that we got from the API which could consist of
// more than just the fields we've modelled in the interface. The
// only thing we really care about to determine whether the
// repository has already been inserted in the database is the clone
// url since that's what the upsert method uses as its key.
cacheKey: (_, repo) => repo.clone_url,
})
for (const pr of pullRequestsFromAPI) {
// We can do this string comparison here rather than convert to date
// because ISO8601 is lexicographically sortable
if (pr.updated_at > mostRecentlyUpdated) {
mostRecentlyUpdated = pr.updated_at
}
// We know the base repo isn't null since that's where we got the PR from
// in the first place.
if (pr.base.repo === null) {
return fatalError('PR cannot have a null base repo')
}
const baseGitHubRepo = await upsertRepo(endpoint, pr.base.repo)
if (baseGitHubRepo.dbID === null) {
return fatalError('PR cannot have a null parent database id')
}
if (pr.state === 'closed') {
prsToDelete.push(getPullRequestKey(baseGitHubRepo, pr.number))
continue
}
// `pr.head.repo` represents the source of the pull request. It might be
// a branch associated with the current repository, or a fork of the
// current repository.
@ -231,71 +336,73 @@ export class PullRequestStore extends TypedBaseStore<GitHubRepository> {
// In cases where the user has removed the fork of the repository after
// opening a pull request, this can be `null`, and the app will not store
// this pull request.
if (pr.head.repo == null) {
log.debug(
`Unable to store pull request #${pr.number} for repository ${
repository.fullName
} as it has no head repository associated with it`
)
prsToDelete.push(getPullRequestKey(baseGitHubRepo, pr.number))
continue
}
const githubRepo = await this.repositoryStore.upsertGitHubRepository(
repository.endpoint,
pr.head.repo
)
const headRepo = await upsertRepo(endpoint, pr.head.repo)
const githubRepoDbId = forceUnwrap(
'PR cannot have non-existent repo',
githubRepo.dbID
)
if (headRepo.dbID === null) {
return fatalError('PR cannot have non-existent repo')
}
// We know the base repo isn't null since that's where we got the PR from
// in the first place.
const parentRepo = forceUnwrap(
'PR cannot have a null base repo',
pr.base.repo
)
const parentGitHubRepo = await this.repositoryStore.upsertGitHubRepository(
repository.endpoint,
parentRepo
)
const parentGitHubRepoDbId = forceUnwrap(
'PR cannot have a null parent database id',
parentGitHubRepo.dbID
)
prsToInsert.push({
prsToUpsert.push({
number: pr.number,
title: pr.title,
createdAt: pr.created_at,
updatedAt: pr.updated_at,
head: {
ref: pr.head.ref,
sha: pr.head.sha,
repoId: githubRepoDbId,
repoId: headRepo.dbID,
},
base: {
ref: pr.base.ref,
sha: pr.base.sha,
repoId: parentGitHubRepoDbId,
repoId: baseGitHubRepo.dbID,
},
author: pr.user.login,
})
}
return this.pullRequestDatabase.transaction('rw', table, async () => {
// we need to delete the stale PRs from the db
// so we remove all for a repo to avoid having to
// do diffing
await table
.where('base.repoId')
.equals(repoDbId)
.delete()
// When loading only PRs that have changed since the last fetch
// we get back all PRs modified _at_ or after the timestamp we give it,
// meaning we will always get at least one PR back. This
// check detects this particular condition and lets us avoid expensive
// branch pruning and updates for a single PR that hasn't actually
// been updated.
if (prsToDelete.length === 0 && prsToUpsert.length === 1) {
const cur = prsToUpsert[0]
const prev = await this.db.getPullRequest(repository, cur.number)
if (prsToInsert.length > 0) {
await table.bulkAdd(prsToInsert)
if (prev !== undefined && structuralEquals(cur, prev)) {
return false
}
})
}
await this.db.transaction(
'rw',
this.db.pullRequests,
this.db.pullRequestsLastUpdated,
async () => {
await this.db.deletePullRequests(prsToDelete)
await this.db.putPullRequests(prsToUpsert)
await this.db.setLastUpdated(repository, new Date(mostRecentlyUpdated))
}
)
return true
}
}
function getNameWithOwner(repository: GitHubRepository) {
const owner = repository.owner.login
const name = repository.name
return { name, owner }
}
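A minimal, self-contained sketch of the memoized lookups used above: repeated queries for the same repository hit an in-memory cache rather than the database. The fake lookup and ids are made up; the real code wraps the store methods with the `mem` package instead of a hand-rolled memoizer.
function memoizeById<T>(
  lookup: (id: number) => Promise<T>
): (id: number) => Promise<T> {
  const cache = new Map<number, Promise<T>>()
  return id => {
    const hit = cache.get(id)
    if (hit !== undefined) {
      return hit
    }
    const pending = lookup(id)
    cache.set(id, pending)
    return pending
  }
}

let databaseQueries = 0
const fakeFindRepositoryById = async (id: number) => {
  databaseQueries++
  return { id, name: `repo-${id}` }
}

const getRepo = memoizeById(fakeFindRepositoryById)

// Two PRs sharing the same head repository cost a single lookup.
Promise.all([getRepo(1), getRepo(1), getRepo(2)]).then(() =>
  console.log(databaseQueries) // 2
)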

View file

@ -2,18 +2,35 @@ import {
RepositoriesDatabase,
IDatabaseGitHubRepository,
IDatabaseOwner,
IDatabaseProtectedBranch,
} from '../databases/repositories-database'
import { Owner } from '../../models/owner'
import { GitHubRepository } from '../../models/github-repository'
import { Repository } from '../../models/repository'
import { fatalError } from '../fatal-error'
import { IAPIRepository } from '../api'
import { IAPIRepository, IAPIBranch } from '../api'
import { BaseStore } from './base-store'
import { enableBranchProtectionChecks } from '../feature-flag'
/** The store for local repositories. */
export class RepositoriesStore extends BaseStore {
private db: RepositoriesDatabase
// Key: repo ID, value: date
private lastStashCheckCache = new Map<number, number>()
/**
* Key is the GitHubRepository id, value is whether the GitHub API reported
* any protected branches for the repository.
*/
private branchProtectionSettingsFoundCache = new Map<number, boolean>()
/**
* Key is a composite of the GitHubRepository id and branch name, value is
* whether this branch is considered protected by the GitHub API
*/
private protectionEnabledForBranchCache = new Map<string, boolean>()
public constructor(db: RepositoriesDatabase) {
super()
@ -107,7 +124,8 @@ export class RepositoriesStore extends BaseStore {
repo.path,
repo.id!,
gitHubRepository,
repo.missing
repo.missing,
repo.isTutorialRepository
)
inflatedRepos.push(inflatedRepo)
}
@ -117,6 +135,53 @@ export class RepositoriesStore extends BaseStore {
)
}
/**
* Add a tutorial repository.
*
* This method differs from the `addRepository` method in that it
* requires that the repository has been created on the remote and
* set up to track it. Given that tutorial repositories are created
* from the no-repositories blank slate it shouldn't be possible for
* another repository with the same path to exist but in case that
* changes in the future this method will set the tutorial flag on
* the existing repository at the given path.
*/
public async addTutorialRepository(
path: string,
endpoint: string,
apiRepository: IAPIRepository
) {
await this.db.transaction(
'rw',
this.db.repositories,
this.db.gitHubRepositories,
this.db.owners,
async () => {
const gitHubRepository = await this.upsertGitHubRepository(
endpoint,
apiRepository
)
const existingRepo = await this.db.repositories.get({ path })
const existingRepoId =
existingRepo && existingRepo.id !== null ? existingRepo.id : undefined
return await this.db.repositories.put(
{
path,
gitHubRepositoryID: gitHubRepository.dbID,
missing: false,
lastStashCheckDate: null,
isTutorialRepository: true,
},
existingRepoId
)
}
)
this.emitUpdate()
}
/**
* Add a new local repository.
*
@ -147,6 +212,7 @@ export class RepositoriesStore extends BaseStore {
path,
gitHubRepositoryID: null,
missing: false,
lastStashCheckDate: null,
})
}
@ -178,15 +244,7 @@ export class RepositoriesStore extends BaseStore {
)
}
const gitHubRepositoryID = repository.gitHubRepository
? repository.gitHubRepository.dbID
: null
await this.db.repositories.put({
id: repository.id,
path: repository.path,
missing,
gitHubRepositoryID,
})
await this.db.repositories.update(repoID, { missing })
this.emitUpdate()
@ -194,7 +252,8 @@ export class RepositoriesStore extends BaseStore {
repository.path,
repository.id,
repository.gitHubRepository,
missing
missing,
repository.isTutorialRepository
)
}
@ -210,14 +269,9 @@ export class RepositoriesStore extends BaseStore {
)
}
const gitHubRepositoryID = repository.gitHubRepository
? repository.gitHubRepository.dbID
: null
await this.db.repositories.put({
id: repository.id,
await this.db.repositories.update(repoID, {
missing: false,
path: path,
gitHubRepositoryID,
path,
})
this.emitUpdate()
@ -226,10 +280,74 @@ export class RepositoriesStore extends BaseStore {
path,
repository.id,
repository.gitHubRepository,
false
false,
repository.isTutorialRepository
)
}
/**
* Sets the last time the repository was checked for stash entries
*
* @param repository The repository to update the last stash check date for
* @param date The date and time at which the last stash check took place; defaults to
* the current time
*/
public async updateLastStashCheckDate(
repository: Repository,
date: number = Date.now()
): Promise<void> {
const repoID = repository.id
if (repoID === 0) {
return fatalError(
'`updateLastStashCheckDate` can only update the last stash check date for a repository which has been added to the database.'
)
}
await this.db.repositories.update(repoID, {
lastStashCheckDate: date,
})
this.lastStashCheckCache.set(repoID, date)
this.emitUpdate()
}
/**
* Gets the last time the repository was checked for stash entries
*
* @param repository The repository to retrieve the last stash check date for
*/
public async getLastStashCheckDate(
repository: Repository
): Promise<number | null> {
const repoID = repository.id
if (!repoID) {
return fatalError(
'`getLastStashCheckDate` - can only retrieve the last stash check date for repositories that have been stored in the database.'
)
}
let lastCheckDate = this.lastStashCheckCache.get(repoID) || null
if (lastCheckDate !== null) {
return lastCheckDate
}
const record = await this.db.repositories.get(repoID)
if (record === undefined) {
return fatalError(
`'getLastStashCheckDate' - unable to find repository with ID: ${repoID}`
)
}
lastCheckDate = record.lastStashCheckDate
if (lastCheckDate !== null) {
this.lastStashCheckCache.set(repoID, lastCheckDate)
}
return lastCheckDate
}
private async putOwner(endpoint: string, login: string): Promise<Owner> {
login = login.toLowerCase()
@ -332,10 +450,74 @@ export class RepositoriesStore extends BaseStore {
repository.path,
repository.id,
updatedGitHubRepo,
repository.missing
repository.missing,
repository.isTutorialRepository
)
}
/** Add or update the branch protections associated with a GitHub repository. */
public async updateBranchProtections(
gitHubRepository: GitHubRepository,
protectedBranches: ReadonlyArray<IAPIBranch>
): Promise<void> {
if (!enableBranchProtectionChecks()) {
return
}
const dbID = gitHubRepository.dbID
if (!dbID) {
return fatalError(
'`updateBranchProtections` can only update a GitHub repository for a repository which has been added to the database.'
)
}
await this.db.transaction('rw', this.db.protectedBranches, async () => {
// This update flow is organized into two stages:
//
// - update the in-memory cache
// - update the underlying database state
//
// This should ensure any stale values are not being used, and avoids
// the need to query the database while the results are in memory.
const prefix = getKeyPrefix(dbID)
for (const key of this.protectionEnabledForBranchCache.keys()) {
// invalidate any cached entries belonging to this repository
if (key.startsWith(prefix)) {
this.protectionEnabledForBranchCache.delete(key)
}
}
const branchRecords = protectedBranches.map<IDatabaseProtectedBranch>(
b => ({
repoId: dbID,
name: b.name,
})
)
// update cached values to avoid database lookup
for (const item of branchRecords) {
const key = getKey(dbID, item.name)
this.protectionEnabledForBranchCache.set(key, true)
}
await this.db.protectedBranches
.where('repoId')
.equals(dbID)
.delete()
const protectionsFound = branchRecords.length > 0
this.branchProtectionSettingsFoundCache.set(dbID, protectionsFound)
if (branchRecords.length > 0) {
await this.db.protectedBranches.bulkAdd(branchRecords)
}
})
this.emitUpdate()
}
/**
* Sets the last time the repository was checked for pruning
*
@ -408,4 +590,97 @@ export class RepositoriesStore extends BaseStore {
return record!.lastPruneDate
}
/**
* Load the branch protection information for a repository from the database
* and cache the results in memory
*/
private async loadAndCacheBranchProtection(dbID: number) {
// query the database to find any protected branches
const branches = await this.db.protectedBranches
.where('repoId')
.equals(dbID)
.toArray()
const branchProtectionsFound = branches.length > 0
this.branchProtectionSettingsFoundCache.set(dbID, branchProtectionsFound)
// fill the retrieved records into the per-branch cache
for (const branch of branches) {
const key = getKey(dbID, branch.name)
this.protectionEnabledForBranchCache.set(key, true)
}
return branchProtectionsFound
}
/**
* Check if any branch protection settings are enabled for the repository
* through the GitHub API.
*/
public async hasBranchProtectionsConfigured(
gitHubRepository: GitHubRepository
): Promise<boolean> {
if (gitHubRepository.dbID === null) {
return fatalError(
'unable to get protected branches, GitHub repository has a null dbID'
)
}
const { dbID } = gitHubRepository
const branchProtectionsFound = this.branchProtectionSettingsFoundCache.get(
dbID
)
if (branchProtectionsFound === undefined) {
return this.loadAndCacheBranchProtection(dbID)
}
return branchProtectionsFound
}
/**
* Check if the given branch for the repository is protected through the
* GitHub API.
*/
public async isBranchProtectedOnRemote(
gitHubRepository: GitHubRepository,
branchName: string
): Promise<boolean> {
if (gitHubRepository.dbID === null) {
return fatalError(
'unable to get protected branches, GitHub repository has a null dbID'
)
}
const { dbID } = gitHubRepository
const key = getKey(dbID, branchName)
const cachedProtectionValue = this.protectionEnabledForBranchCache.get(key)
if (cachedProtectionValue === true) {
return cachedProtectionValue
}
const databaseValue = await this.db.protectedBranches.get([
dbID,
branchName,
])
// if no row found, this means no protection is found for the branch
const value = databaseValue !== undefined
this.protectionEnabledForBranchCache.set(key, value)
return value
}
}
/** Compute the key for the branch protection cache */
function getKey(dbID: number, branchName: string) {
return `${getKeyPrefix(dbID)}${branchName}`
}
/** Compute the key prefix for the branch protection cache */
function getKeyPrefix(dbID: number) {
return `${dbID}-`
}
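A self-contained sketch of the prefix-keyed cache used above: entries are keyed "<repoId>-<branchName>" so every branch belonging to a repository can be invalidated by prefix, mirroring the invalidation loop in `updateBranchProtections`. The sample data is made up.
const protectionCache = new Map<string, boolean>()
const keyFor = (repoId: number, branch: string) => `${repoId}-${branch}`

protectionCache.set(keyFor(1, 'master'), true)
protectionCache.set(keyFor(1, 'release'), true)
protectionCache.set(keyFor(2, 'master'), true)

function invalidateRepository(repoId: number) {
  const prefix = `${repoId}-`
  for (const key of protectionCache.keys()) {
    // drop every cached branch entry belonging to this repository
    if (key.startsWith(prefix)) {
      protectionCache.delete(key)
    }
  }
}

invalidateRepository(1)
console.log([...protectionCache.keys()]) // [ '2-master' ]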

View file

@ -16,6 +16,8 @@ import {
IRepositoryState,
RepositorySectionTab,
ICommitSelection,
IRebaseState,
ChangesSelectionKind,
} from '../app-state'
import { ComparisonCache } from '../comparison-cache'
import { IGitHubUser } from '../databases'
@ -97,6 +99,17 @@ export class RepositoryStateCache {
return { branchesState: newState }
})
}
public updateRebaseState<K extends keyof IRebaseState>(
repository: Repository,
fn: (branchesState: IRebaseState) => Pick<IRebaseState, K>
) {
this.update(repository, state => {
const { rebaseState } = state
const newState = merge(rebaseState, fn(rebaseState))
return { rebaseState: newState }
})
}
}
function getInitialRepositoryState(): IRepositoryState {
@ -111,12 +124,17 @@ function getInitialRepositoryState(): IRepositoryState {
workingDirectory: WorkingDirectoryStatus.fromFiles(
new Array<WorkingDirectoryFileChange>()
),
selectedFileIDs: [],
diff: null,
selection: {
kind: ChangesSelectionKind.WorkingDirectory,
selectedFileIDs: [],
diff: null,
},
commitMessage: DefaultCommitMessage,
coAuthors: [],
showCoAuthoredBy: false,
conflictState: null,
stashEntry: null,
currentBranchProtected: false,
},
selectedSection: RepositorySectionTab.Changes,
branchesState: {
@ -145,6 +163,12 @@ function getInitialRepositoryState(): IRepositoryState {
defaultBranch: null,
inferredComparisonBranch: { branch: null, aheadBehind: null },
},
rebaseState: {
step: null,
progress: null,
commits: null,
userHasResolvedConflicts: false,
},
commitAuthor: null,
gitHubUsers: new Map<string, IGitHubUser>(),
commitLookup: new Map<string, Commit>(),
@ -157,7 +181,5 @@ function getInitialRepositoryState(): IRepositoryState {
checkoutProgress: null,
pushPullFetchProgress: null,
revertProgress: null,
branchFilterText: '',
pullRequestFilterText: '',
}
}

View file

@ -28,7 +28,7 @@ function getUnverifiedUserErrorMessage(login: string): string {
return `Unable to authenticate. The account ${login} is lacking a verified email address. Please sign in to GitHub.com, confirm your email address in the Emails section under Personal settings, and try again.`
}
const EnterpriseTooOldMessage = `The GitHub Enterprise version does not support GitHub Desktop. Talk to your server's administrator about upgrading to the latest version of GitHub Enterprise.`
const EnterpriseTooOldMessage = `The GitHub Enterprise Server version does not support GitHub Desktop. Talk to your server's administrator about upgrading to the latest version of GitHub Enterprise Server.`
/**
* An enumeration of the possible steps that the sign in
@ -79,8 +79,8 @@ export interface ISignInState {
/**
* State interface representing the endpoint entry step.
* This is the initial step in the Enterprise sign in flow
* and is not present when signing in to GitHub.com
* This is the initial step in the Enterprise Server sign in
* flow and is not present when signing in to GitHub.com
*/
export interface IEndpointEntryState extends ISignInState {
readonly kind: SignInStep.EndpointEntry
@ -91,7 +91,7 @@ export interface IEndpointEntryState extends ISignInState {
* the user provides credentials and/or initiates a browser
* OAuth sign in process. This step occurs as the first step
* when signing in to GitHub.com and as the second step when
* signing in to a GitHub Enterprise instance.
* signing in to a GitHub Enterprise Server instance.
*/
export interface IAuthenticationState extends ISignInState {
readonly kind: SignInStep.Authentication
@ -100,15 +100,16 @@ export interface IAuthenticationState extends ISignInState {
* The URL to the host which we're currently authenticating
* against. This will be either https://api.github.com when
* signing in against GitHub.com or a user-specified
* URL when signing in against a GitHub Enterprise instance.
* URL when signing in against a GitHub Enterprise Server
* instance.
*/
readonly endpoint: string
/**
* A value indicating whether or not the endpoint supports
* basic authentication (i.e. username and password). All
* GitHub Enterprise instances support OAuth (or web flow
* sign-in).
* GitHub Enterprise Server instances support OAuth (or web
* flow sign-in).
*/
readonly supportsBasicAuth: boolean
@ -122,8 +123,8 @@ export interface IAuthenticationState extends ISignInState {
* State interface representing the TwoFactorAuthentication
* step where the user provides an OTP token. This step
* occurs after the authentication step both for GitHub.com,
* and GitHub Enterprise when the user has enabled two factor
* authentication on the host.
* and GitHub Enterprise Server when the user has enabled two
* factor authentication on the host.
*/
export interface ITwoFactorAuthenticationState extends ISignInState {
readonly kind: SignInStep.TwoFactorAuthentication
@ -132,7 +133,8 @@ export interface ITwoFactorAuthenticationState extends ISignInState {
* The URL to the host which we're currently authenticating
* against. This will be either https://api.github.com when
* signing in against GitHub.com or a user-specified
* URL when signing in against a GitHub Enterprise instance.
* URL when signing in against a GitHub Enterprise Server
* instance.
*/
readonly endpoint: string
@ -187,7 +189,7 @@ interface IAuthenticationEvent {
/**
* A store encapsulating all logic related to signing in a user
* to GitHub.com, or a GitHub Enterprise instance.
* to GitHub.com, or a GitHub Enterprise Server instance.
*/
export class SignInStore extends TypedBaseStore<SignInState | null> {
private state: SignInState | null = null
@ -240,7 +242,7 @@ export class SignInStore extends TypedBaseStore<SignInState | null> {
}
} else {
throw new Error(
`Unable to authenticate with the GitHub Enterprise instance. Verify that the URL is correct, that your GitHub Enterprise instance is running version ${minimumSupportedEnterpriseVersion} or later, that you have an internet connection and try again.`
`Unable to authenticate with the GitHub Enterprise Server instance. Verify that the URL is correct, that your GitHub Enterprise Server instance is running version ${minimumSupportedEnterpriseVersion} or later, that you have an internet connection and try again.`
)
}
}
@ -388,6 +390,13 @@ export class SignInStore extends TypedBaseStore<SignInState | null> {
loading: false,
error: new Error(EnterpriseTooOldMessage),
})
} else if (response.kind === AuthorizationResponseKind.WebFlowRequired) {
this.setState({
...currentState,
loading: false,
supportsBasicAuth: false,
kind: SignInStep.Authentication,
})
} else {
return assertNever(response, `Unsupported response: ${response}`)
}
@ -438,9 +447,9 @@ export class SignInStore extends TypedBaseStore<SignInState | null> {
}
/**
* Initiate a sign in flow for a GitHub Enterprise instance. This will
* put the store in the EndpointEntry step ready to receive the url
* to the enterprise instance.
* Initiate a sign in flow for a GitHub Enterprise Server instance.
* This will put the store in the EndpointEntry step ready to
* receive the url to the enterprise instance.
*/
public beginEnterpriseSignIn() {
this.setState({
@ -482,11 +491,11 @@ export class SignInStore extends TypedBaseStore<SignInState | null> {
let error = e
if (e.name === InvalidURLErrorName) {
error = new Error(
`The GitHub Enterprise instance address doesn't appear to be a valid URL. We're expecting something like https://github.example.com.`
`The GitHub Enterprise Server instance address doesn't appear to be a valid URL. We're expecting something like https://github.example.com.`
)
} else if (e.name === InvalidProtocolErrorName) {
error = new Error(
'Unsupported protocol. Only http or https is supported when authenticating with GitHub Enterprise instances.'
'Unsupported protocol. Only http or https is supported when authenticating with GitHub Enterprise Server instances.'
)
}
@ -619,6 +628,16 @@ export class SignInStore extends TypedBaseStore<SignInState | null> {
case AuthorizationResponseKind.EnterpriseTooOld:
this.emitError(new Error(EnterpriseTooOldMessage))
break
case AuthorizationResponseKind.WebFlowRequired:
this.setState({
...currentState,
forgotPasswordUrl: this.getForgotPasswordURL(currentState.endpoint),
loading: false,
supportsBasicAuth: false,
kind: SignInStep.Authentication,
error: null,
})
break
default:
assertNever(response, `Unknown response: ${response}`)
}

View file

@ -10,11 +10,14 @@ import {
isMergeConflictState,
isRebaseConflictState,
RebaseConflictState,
ChangesSelection,
ChangesSelectionKind,
} from '../../app-state'
import { DiffSelectionType, IDiff } from '../../../models/diff'
import { DiffSelectionType } from '../../../models/diff'
import { caseInsensitiveCompare } from '../../compare'
import { IStatsStore } from '../../stats/stats-store'
import { ManualConflictResolution } from '../../../models/manual-conflict-resolution'
import { assertNever } from '../../fatal-error'
/**
* Internal shape of the return value from this response because the compiler
@ -23,8 +26,7 @@ import { ManualConflictResolution } from '../../../models/manual-conflict-resolu
*/
type ChangedFilesResult = {
readonly workingDirectory: WorkingDirectoryStatus
readonly selectedFileIDs: string[]
readonly diff: IDiff | null
readonly selection: ChangesSelection
}
export function updateChangedFiles(
@ -62,19 +64,6 @@ export function updateChangedFiles(
// lookups using .find on the mergedFiles array.
const mergedFileIds = new Set(mergedFiles.map(x => x.id))
// The previously selected files might not be available in the working
// directory any more due to having been committed or discarded so we'll
// do a pass over and filter out any selected files that aren't available.
let selectedFileIDs = state.selectedFileIDs.filter(id =>
mergedFileIds.has(id)
)
// Select the first file if we don't have anything selected and we
// have something to select.
if (selectedFileIDs.length === 0 && mergedFiles.length > 0) {
selectedFileIDs = [mergedFiles[0].id]
}
// The file selection could have changed if the previously selected files
// are no longer selectable (they were discarded or committed) but if they
// were not changed we can reuse the diff. Note, however that we only render
@ -83,17 +72,46 @@ export function updateChangedFiles(
// diff we had, if not we'll clear it.
const workingDirectory = WorkingDirectoryStatus.fromFiles(mergedFiles)
const diff =
selectedFileIDs.length === 1 &&
state.selectedFileIDs.length === 1 &&
state.selectedFileIDs[0] === selectedFileIDs[0]
? state.diff
: null
const selectionKind = state.selection.kind
if (state.selection.kind === ChangesSelectionKind.WorkingDirectory) {
// The previously selected files might not be available in the working
// directory any more due to having been committed or discarded so we'll
// do a pass over and filter out any selected files that aren't available.
let selectedFileIDs = state.selection.selectedFileIDs.filter(id =>
mergedFileIds.has(id)
)
return {
workingDirectory,
selectedFileIDs,
diff,
// Select the first file if we don't have anything selected and we
// have something to select.
if (selectedFileIDs.length === 0 && mergedFiles.length > 0) {
selectedFileIDs = [mergedFiles[0].id]
}
const diff =
selectedFileIDs.length === 1 &&
state.selection.selectedFileIDs.length === 1 &&
state.selection.selectedFileIDs[0] === selectedFileIDs[0]
? state.selection.diff
: null
return {
workingDirectory,
selection: {
kind: ChangesSelectionKind.WorkingDirectory,
selectedFileIDs,
diff,
},
}
} else if (state.selection.kind === ChangesSelectionKind.Stash) {
return {
workingDirectory,
selection: state.selection,
}
} else {
return assertNever(
state.selection,
`Unknown selection kind ${selectionKind}`
)
}
}
@ -117,7 +135,7 @@ function getConflictState(
}
}
if (status.rebaseContext !== null) {
if (status.rebaseInternalState !== null) {
const { currentTip } = status
if (currentTip == null) {
return null
@ -127,7 +145,7 @@ function getConflictState(
targetBranch,
originalBranchTip,
baseBranchTip,
} = status.rebaseContext
} = status.rebaseInternalState
return {
kind: 'rebase',
@ -215,12 +233,11 @@ function performEffectsForRebaseStateChange(
) {
const previousTip = prevConflictState.originalBranchTip
if (
const previousTipChanged =
previousTip !== currentTip &&
currentBranch === prevConflictState.targetBranch
) {
statsStore.recordRebaseSuccessAfterConflicts()
} else {
if (!previousTipChanged) {
statsStore.recordRebaseAbortedAfterConflicts()
}
}
@ -277,3 +294,45 @@ export function updateConflictState(
return newConflictState
}
/**
* Generate the partial state needed to update ChangesState selection property
* when a user or external constraints require us to do so.
*
* @param state The current changes state
* @param files An array of files to select when showing the working directory.
* If undefined this method will preserve the previously selected
* files or pick the first changed file if no selection exists.
*/
export function selectWorkingDirectoryFiles(
state: IChangesState,
files?: ReadonlyArray<WorkingDirectoryFileChange>
): Pick<IChangesState, 'selection'> {
let selectedFileIDs: Array<string>
if (files === undefined) {
if (state.selection.kind === ChangesSelectionKind.WorkingDirectory) {
// No files provided, just a desire to make sure selection is
// working directory. If it already is there's nothing for us to do.
return { selection: state.selection }
} else if (state.workingDirectory.files.length > 0) {
// No files provided and the current selection is stash, pick the
// first file we've got.
selectedFileIDs = [state.workingDirectory.files[0].id]
} else {
// Not much to do here. No files provided, nothing in the
// working directory.
selectedFileIDs = new Array<string>()
}
} else {
selectedFileIDs = files.map(x => x.id)
}
return {
selection: {
kind: ChangesSelectionKind.WorkingDirectory as ChangesSelectionKind.WorkingDirectory,
selectedFileIDs,
diff: null,
},
}
}

View file

@ -147,6 +147,12 @@ export class Tokenizer {
maybeIssue = text.slice(index, nextIndex)
}
// handle list of issues
if (maybeIssue.endsWith(',')) {
nextIndex -= 1
maybeIssue = text.slice(index, nextIndex)
}
if (!/^#\d+$/.test(maybeIssue)) {
return null
}
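The new branch above exists so that a trailing comma in a list of references (e.g. `Fixes #1, #2`) doesn't prevent the individual `#123` tokens from matching. A standalone sketch of the same trimming step, outside the Tokenizer class (names are illustrative):

// Illustrative: trim a single trailing comma before validating the token.
function looksLikeIssueReference(
  text: string,
  index: number,
  nextIndex: number
): boolean {
  let maybeIssue = text.slice(index, nextIndex)
  if (maybeIssue.endsWith(',')) {
    maybeIssue = text.slice(index, nextIndex - 1)
  }
  return /^#\d+$/.test(maybeIssue)
}

// looksLikeIssueReference('Fixes #1, #2', 6, 9) === true ('#1,' is trimmed to '#1')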

View file

@ -4,11 +4,11 @@ import { getDotComAPIEndpoint } from './api'
/**
* Best-effort attempt to figure out if this commit was committed using
* the web flow on GitHub.com or GitHub Enterprise. Web flow
* the web flow on GitHub.com or GitHub Enterprise Server. Web flow
* commits (such as PR merges) will have a special GitHub committer
* with a noreply email address.
*
* For GitHub.com we can be spot on but for GitHub Enterprise it's
* For GitHub.com we can be spot on but for GitHub Enterprise Server it's
* possible we could fail if they've set up a custom smtp host
* that doesn't correspond to the hostname.
*/
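A minimal sketch of the check this comment describes, assuming the committer email is available as a plain string (the real implementation in this file is not part of the hunk and may differ):

// Sketch only: GitHub.com web flow commits use the 'noreply@github.com'
// committer; for GitHub Enterprise Server the noreply address is derived
// from the instance hostname, which is why a custom smtp host can defeat
// this heuristic.
function looksLikeWebFlowCommitter(email: string, endpoint: string): boolean {
  if (endpoint === getDotComAPIEndpoint()) {
    return email === 'noreply@github.com'
  }
  const host = new URL(endpoint).hostname
  return email === `noreply@${host}`
}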

View file

@ -45,7 +45,12 @@ export function registerWindowStateChangedEvents(
window.on('unmaximize', () => sendWindowStateEvent(window, 'normal'))
window.on('restore', () => sendWindowStateEvent(window, 'normal'))
window.on('hide', () => sendWindowStateEvent(window, 'hidden'))
window.on('show', () => sendWindowStateEvent(window, 'normal'))
window.on('show', () => {
// because the app can be maximized before being closed - which will restore it
// maximized on the next launch - this function should inspect the current state
// rather than always assume it is a 'normal' launch
sendWindowStateEvent(window, getWindowState(window))
})
}
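The `getWindowState` helper used above is defined elsewhere in this module; a sketch of what such a mapping typically looks like (the real implementation may differ):

// Sketch: map BrowserWindow flags onto the window-state strings passed to
// sendWindowStateEvent. Full-screen and minimized take precedence over
// maximized.
function getWindowStateSketch(
  window: Electron.BrowserWindow
): 'full-screen' | 'maximized' | 'minimized' | 'hidden' | 'normal' {
  if (window.isFullScreen()) {
    return 'full-screen'
  } else if (window.isMaximized()) {
    return 'maximized'
  } else if (window.isMinimized()) {
    return 'minimized'
  } else if (!window.isVisible()) {
    return 'hidden'
  }
  return 'normal'
}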
/**

View file

@ -8,8 +8,7 @@ import { ILaunchStats } from '../lib/stats'
import { menuFromElectronMenu } from '../models/app-menu'
import { now } from './now'
import * as path from 'path'
let windowStateKeeper: any | null = null
import * as windowStateKeeper from 'electron-window-state'
export class AppWindow {
private window: Electron.BrowserWindow
@ -22,13 +21,6 @@ export class AppWindow {
private minHeight = 660
public constructor() {
if (!windowStateKeeper) {
// `electron-window-state` requires Electron's `screen` module, which can
// only be required after the app has emitted `ready`. So require it
// lazily.
windowStateKeeper = require('electron-window-state')
}
const savedWindowState = windowStateKeeper({
defaultWidth: this.minWidth,
defaultHeight: this.minHeight,
@ -51,6 +43,7 @@ export class AppWindow {
disableBlinkFeatures: 'Auxclick',
// Enable, among other things, the ResizeObserver
experimentalFeatures: true,
nodeIntegration: true,
},
acceptFirstMouse: true,
}

View file

@ -43,6 +43,7 @@ export class CrashWindow {
// process but our components which relies on ResizeObserver should
// be able to degrade gracefully.
experimentalFeatures: false,
nodeIntegration: true,
},
}

View file

@ -1,11 +1,14 @@
import { app, net } from 'electron'
const ErrorEndpoint = 'https://central.github.com/api/desktop/exception'
const NonFatalErrorEndpoint =
'https://central.github.com/api/desktop-non-fatal/exception'
/** Report the error to Central. */
export async function reportError(
error: Error,
extra?: { [key: string]: string }
extra?: { [key: string]: string },
nonFatal?: boolean
) {
if (__DEV__) {
return
@ -32,7 +35,7 @@ export async function reportError(
const requestOptions: Electron.RequestOptions = {
method: 'POST',
url: ErrorEndpoint,
url: nonFatal ? NonFatalErrorEndpoint : ErrorEndpoint,
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
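With the optional `nonFatal` flag added above, a caller reporting a recoverable error would presumably look something like this (illustrative only; the error message and extra fields are made up):

// Fire-and-forget report routed to the non-fatal endpoint.
reportError(
  new Error('failed to refresh pull requests'),
  { uptime: '123.456' },
  true // nonFatal
)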

View file

@ -2,14 +2,12 @@ import '../lib/logging/main/install'
import { app, Menu, ipcMain, BrowserWindow, shell } from 'electron'
import * as Fs from 'fs'
import * as URL from 'url'
import { MenuLabelsEvent } from '../models/menu-labels'
import { AppWindow } from './app-window'
import {
buildDefaultMenu,
MenuEvent,
MenuLabels,
getAllMenuItems,
} from './menu'
import { buildDefaultMenu, MenuEvent, getAllMenuItems } from './menu'
import { shellNeedsPatching, updateEnvironmentForProcess } from '../lib/shell'
import { parseAppURL } from '../lib/parse-app-url'
import { handleSquirrelEvent } from './squirrel-updater'
@ -61,10 +59,49 @@ function handleUncaughtException(error: Error) {
showUncaughtException(isLaunchError, error)
}
/**
* Calculates the number of seconds the app has been running
*/
function getUptimeInSeconds() {
return (now() - launchTime) / 1000
}
function getExtraErrorContext(): Record<string, string> {
return {
uptime: getUptimeInSeconds().toFixed(3),
time: new Date().toString(),
}
}
/** Extra argument for the protocol launcher on Windows */
const protocolLauncherArg = '--protocol-launcher'
const possibleProtocols = new Set(['x-github-client'])
if (__DEV__) {
possibleProtocols.add('x-github-desktop-dev-auth')
} else {
possibleProtocols.add('x-github-desktop-auth')
}
// Also support Desktop Classic's protocols.
if (__DARWIN__) {
possibleProtocols.add('github-mac')
} else if (__WIN32__) {
possibleProtocols.add('github-windows')
}
app.on('window-all-closed', () => {
// If we don't subscribe to this event and all windows are closed, the default
// behavior is to quit the app. We don't want that though, we control that
// behavior through the mainWindow onClose event such that on macOS we only
// hide the main window when a user attempts to close it.
//
// If we don't subscribe to this and change the default behavior we break
// the crash process window which is shown after the main window is closed.
})
process.on('uncaughtException', (error: Error) => {
error = withSourceMappedStack(error)
reportError(error)
reportError(error, getExtraErrorContext())
handleUncaughtException(error)
})
@ -179,12 +216,23 @@ function handlePossibleProtocolLauncherArgs(args: ReadonlyArray<string>) {
if (__WIN32__) {
// Desktop registers its protocol handler callback on Windows as
// `[executable path] --protocol-launcher "%1"`. At launch it checks
// for that exact scenario here before doing any processing, and only
// processing the first argument. If there's more than 3 args because of a
// `[executable path] --protocol-launcher "%1"`. Note that extra command
// line arguments might be added by Chromium
// (https://electronjs.org/docs/api/app#event-second-instance).
// At launch Desktop checks for that exact scenario here before doing any
// processing. If there's more than one matching url argument because of a
// malformed or untrusted url then we bail out.
if (args.length === 3 && args[1] === '--protocol-launcher') {
handleAppURL(args[2])
const matchingUrls = args.filter(arg => {
const url = URL.parse(arg)
// Note: this `slice` removes the trailing `:` that url.parse keeps on the protocol
return url.protocol && possibleProtocols.has(url.protocol.slice(0, -1))
})
if (args.includes(protocolLauncherArg) && matchingUrls.length === 1) {
handleAppURL(matchingUrls[0])
} else {
log.error(`Malformed launch arguments received: ${args}`)
}
} else if (args.length > 1) {
handleAppURL(args[1])
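To make the `slice` remark concrete: `url.parse` keeps the trailing colon on the protocol, so it has to be dropped before the scheme can be looked up in `possibleProtocols`. A small illustration (the URL is made up):

// URL.parse(...).protocol includes the trailing ':'
const parsed = URL.parse('x-github-client://openRepo/https://github.com/desktop/desktop')
// parsed.protocol === 'x-github-client:'
const scheme = parsed.protocol ? parsed.protocol.slice(0, -1) : null
// scheme === 'x-github-client', which is what possibleProtocols contains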
@ -198,7 +246,7 @@ function handlePossibleProtocolLauncherArgs(args: ReadonlyArray<string>) {
function setAsDefaultProtocolClient(protocol: string) {
if (__WIN32__) {
app.setAsDefaultProtocolClient(protocol, process.execPath, [
'--protocol-launcher',
protocolLauncherArg,
])
} else {
app.setAsDefaultProtocolClient(protocol)
@ -219,28 +267,22 @@ app.on('ready', () => {
readyTime = now() - launchTime
setAsDefaultProtocolClient('x-github-client')
if (__DEV__) {
setAsDefaultProtocolClient('x-github-desktop-dev-auth')
} else {
setAsDefaultProtocolClient('x-github-desktop-auth')
}
// Also support Desktop Classic's protocols.
if (__DARWIN__) {
setAsDefaultProtocolClient('github-mac')
} else if (__WIN32__) {
setAsDefaultProtocolClient('github-windows')
}
possibleProtocols.forEach(protocol => setAsDefaultProtocolClient(protocol))
createWindow()
Menu.setApplicationMenu(buildDefaultMenu({}))
Menu.setApplicationMenu(
buildDefaultMenu({
selectedShell: null,
selectedExternalEditor: null,
askForConfirmationOnRepositoryRemoval: false,
askForConfirmationOnForcePush: false,
})
)
ipcMain.on(
'update-preferred-app-menu-item-labels',
(event: Electron.IpcMessageEvent, labels: MenuLabels) => {
(event: Electron.IpcMessageEvent, labels: MenuLabelsEvent) => {
// The current application menu is mutable and we frequently
// change whether particular items are enabled or not through
// the update-menu-state IPC event. This menu that we're creating
@ -350,15 +392,16 @@ app.on('ready', () => {
) => {
let sendMenuChangedEvent = false
const currentMenu = Menu.getApplicationMenu()
if (currentMenu === null) {
log.debug(`unable to get current menu, bailing out...`)
return
}
for (const item of items) {
const { id, state } = item
const currentMenu = Menu.getApplicationMenu()
if (currentMenu === null) {
return
}
const menuItem = currentMenu.getMenuItemById(id)
if (menuItem) {
@ -378,6 +421,7 @@ app.on('ready', () => {
}
if (sendMenuChangedEvent && mainWindow) {
Menu.setApplicationMenu(currentMenu)
mainWindow.sendAppMenu()
}
}
@ -441,15 +485,26 @@ app.on('ready', () => {
'send-error-report',
(
event: Electron.IpcMessageEvent,
{ error, extra }: { error: Error; extra: { [key: string]: string } }
{
error,
extra,
nonFatal,
}: { error: Error; extra: { [key: string]: string }; nonFatal?: boolean }
) => {
reportError(error, extra)
reportError(
error,
{
...getExtraErrorContext(),
...extra,
},
nonFatal
)
}
)
ipcMain.on(
'open-external',
(event: Electron.IpcMessageEvent, { path }: { path: string }) => {
async (event: Electron.IpcMessageEvent, { path }: { path: string }) => {
const pathLowerCase = path.toLowerCase()
if (
pathLowerCase.startsWith('http://') ||
@ -458,7 +513,14 @@ app.on('ready', () => {
log.info(`opening in browser: ${path}`)
}
const result = shell.openExternal(path)
let result
try {
await shell.openExternal(path)
result = true
} catch (e) {
log.error(`Call to openExternal failed: '${e}'`)
result = false
}
event.sender.send('open-external-result', { result })
}
)

View file

@ -4,23 +4,23 @@ import { MenuEvent } from './menu-event'
import { truncateWithEllipsis } from '../../lib/truncate-with-ellipsis'
import { getLogDirectoryPath } from '../../lib/logging/get-log-path'
import { ensureDir } from 'fs-extra'
import { log } from '../log'
import { openDirectorySafe } from '../shell'
import { enableRebaseDialog } from '../../lib/feature-flag'
import { enableRebaseDialog, enableStashing } from '../../lib/feature-flag'
import { MenuLabelsEvent } from '../../models/menu-labels'
import { DefaultEditorLabel } from '../../ui/lib/context-menu'
const defaultEditorLabel = __DARWIN__
? 'Open in External Editor'
: 'Open in external editor'
const defaultShellLabel = __DARWIN__
? 'Open in Terminal'
: 'Open in Command Prompt'
const defaultPullRequestLabel = __DARWIN__
const createPullRequestLabel = __DARWIN__
? 'Create Pull Request'
: 'Create &pull request'
const defaultBranchNameDefaultValue = __DARWIN__
? 'Default Branch'
: 'default branch'
const showPullRequestLabel = __DARWIN__
? 'Show Pull Request'
: 'Show &pull request'
const defaultBranchNameValue = __DARWIN__ ? 'Default Branch' : 'default branch'
const confirmRepositoryRemovalLabel = __DARWIN__ ? 'Remove…' : '&Remove…'
const repositoryRemovalLabel = __DARWIN__ ? 'Remove' : '&Remove'
enum ZoomDirection {
Reset,
@ -28,21 +28,34 @@ enum ZoomDirection {
Out,
}
export type MenuLabels = {
editorLabel?: string
shellLabel?: string
pullRequestLabel?: string
defaultBranchName?: string
}
export function buildDefaultMenu({
editorLabel = defaultEditorLabel,
shellLabel = defaultShellLabel,
pullRequestLabel = defaultPullRequestLabel,
defaultBranchName = defaultBranchNameDefaultValue,
}: MenuLabels): Electron.Menu {
selectedExternalEditor,
selectedShell,
askForConfirmationOnForcePush,
askForConfirmationOnRepositoryRemoval,
hasCurrentPullRequest = false,
defaultBranchName = defaultBranchNameValue,
isForcePushForCurrentRepository = false,
isStashedChangesVisible = false,
}: MenuLabelsEvent): Electron.Menu {
defaultBranchName = truncateWithEllipsis(defaultBranchName, 25)
const removeRepoLabel = askForConfirmationOnRepositoryRemoval
? confirmRepositoryRemovalLabel
: repositoryRemovalLabel
const pullRequestLabel = hasCurrentPullRequest
? showPullRequestLabel
: createPullRequestLabel
const shellLabel =
selectedShell === null ? defaultShellLabel : `Open in ${selectedShell}`
const editorLabel =
selectedExternalEditor === null
? DefaultEditorLabel
: `Open in ${selectedExternalEditor}`
const template = new Array<Electron.MenuItemConstructorOptions>()
const separator: Electron.MenuItemConstructorOptions = { type: 'separator' }
@ -120,7 +133,11 @@ export function buildDefaultMenu({
click: emit('show-preferences'),
},
separator,
{ role: 'quit' }
{
role: 'quit',
label: 'E&xit',
accelerator: 'Alt+F4',
}
)
}
@ -140,6 +157,13 @@ export function buildDefaultMenu({
accelerator: 'CmdOrCtrl+A',
click: emit('select-all'),
},
separator,
{
id: 'find',
label: __DARWIN__ ? 'Find' : '&Find',
accelerator: 'CmdOrCtrl+F',
click: emit('find-text'),
},
],
})
@ -177,6 +201,15 @@ export function buildDefaultMenu({
accelerator: 'CmdOrCtrl+G',
click: emit('go-to-commit-message'),
},
{
label: getStashedChangesLabel(isStashedChangesVisible),
id: 'toggle-stashed-changes',
accelerator: 'Ctrl+H',
click: isStashedChangesVisible
? emit('hide-stashed-changes')
: emit('show-stashed-changes'),
visible: enableStashing(),
},
{
label: __DARWIN__ ? 'Toggle Full Screen' : 'Toggle &full screen',
role: 'togglefullscreen',
@ -230,15 +263,22 @@ export function buildDefaultMenu({
],
})
const pushLabel = getPushLabel(
isForcePushForCurrentRepository,
askForConfirmationOnForcePush
)
const pushEventType = isForcePushForCurrentRepository ? 'force-push' : 'push'
template.push({
label: __DARWIN__ ? 'Repository' : '&Repository',
id: 'repository',
submenu: [
{
id: 'push',
label: __DARWIN__ ? 'Push' : 'P&ush',
label: pushLabel,
accelerator: 'CmdOrCtrl+P',
click: emit('push'),
click: emit(pushEventType),
},
{
id: 'pull',
@ -247,9 +287,9 @@ export function buildDefaultMenu({
click: emit('pull'),
},
{
label: __DARWIN__ ? 'Remove' : '&Remove',
label: removeRepoLabel,
id: 'remove-repository',
accelerator: 'CmdOrCtrl+Delete',
accelerator: 'CmdOrCtrl+Backspace',
click: emit('remove-repository'),
},
separator,
@ -313,6 +353,13 @@ export function buildDefaultMenu({
click: emit('delete-branch'),
},
separator,
{
label: __DARWIN__ ? 'Discard All Changes…' : 'Discard all changes…',
id: 'discard-all-changes',
accelerator: 'CmdOrCtrl+Shift+Backspace',
click: emit('discard-all-changes'),
},
separator,
{
label: __DARWIN__
? `Update from ${defaultBranchName}`
@ -376,23 +423,40 @@ export function buildDefaultMenu({
const submitIssueItem: Electron.MenuItemConstructorOptions = {
label: __DARWIN__ ? 'Report Issue…' : 'Report issue…',
click() {
shell.openExternal('https://github.com/desktop/desktop/issues/new/choose')
shell
.openExternal('https://github.com/desktop/desktop/issues/new/choose')
.catch(err => log.error('Failed opening issue creation page', err))
},
}
const contactSupportItem: Electron.MenuItemConstructorOptions = {
label: __DARWIN__ ? 'Contact GitHub Support…' : '&Contact GitHub support…',
click() {
shell.openExternal(
`https://github.com/contact?from_desktop_app=1&app_version=${app.getVersion()}`
)
shell
.openExternal(
`https://github.com/contact?from_desktop_app=1&app_version=${app.getVersion()}`
)
.catch(err => log.error('Failed opening contact support page', err))
},
}
const showUserGuides: Electron.MenuItemConstructorOptions = {
label: 'Show User Guides',
click() {
shell.openExternal('https://help.github.com/desktop/guides/')
shell
.openExternal('https://help.github.com/desktop/guides/')
.catch(err => log.error('Failed opening user guides page', err))
},
}
const showKeyboardShortcuts: Electron.MenuItemConstructorOptions = {
label: __DARWIN__ ? 'Show Keyboard Shortcuts' : 'Show keyboard shortcuts',
click() {
shell
.openExternal(
'https://help.github.com/en/desktop/getting-started-with-github-desktop/keyboard-shortcuts-in-github-desktop'
)
.catch(err => log.error('Failed opening keyboard shortcuts page', err))
},
}
@ -411,7 +475,7 @@ export function buildDefaultMenu({
openDirectorySafe(logPath)
})
.catch(err => {
log('error', err.message)
log.error('Failed opening logs directory', err)
})
},
}
@ -420,6 +484,7 @@ export function buildDefaultMenu({
submitIssueItem,
contactSupportItem,
showUserGuides,
showKeyboardShortcuts,
showLogsItem,
]
@ -444,6 +509,10 @@ export function buildDefaultMenu({
click: emit('show-release-notes-popup'),
},
],
},
{
label: 'Prune branches',
click: emit('test-prune-branches'),
}
)
}
@ -473,6 +542,29 @@ export function buildDefaultMenu({
return Menu.buildFromTemplate(template)
}
function getPushLabel(
isForcePushForCurrentRepository: boolean,
askForConfirmationOnForcePush: boolean
): string {
if (!isForcePushForCurrentRepository) {
return __DARWIN__ ? 'Push' : 'P&ush'
}
if (askForConfirmationOnForcePush) {
return __DARWIN__ ? 'Force Push…' : 'Force P&ush…'
}
return __DARWIN__ ? 'Force Push' : 'Force P&ush'
}
function getStashedChangesLabel(isStashedChangesVisible: boolean): string {
if (isStashedChangesVisible) {
return __DARWIN__ ? 'Hide Stashed Changes' : 'H&ide stashed changes'
}
return __DARWIN__ ? 'Show Stashed Changes' : 'Sho&w stashed changes'
}
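A quick illustration of the two label helpers above (outcomes shown for macOS, i.e. `__DARWIN__ === true`):

const pushLabel = getPushLabel(
  /* isForcePushForCurrentRepository */ true,
  /* askForConfirmationOnForcePush */ true
)
// pushLabel === 'Force Push…'; with confirmation disabled it would be
// 'Force Push', and a non-force push is simply 'Push'.
// getStashedChangesLabel(true) === 'Hide Stashed Changes'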
type ClickHandler = (
menuItem: Electron.MenuItem,
browserWindow: Electron.BrowserWindow,
@ -525,29 +617,27 @@ function zoom(direction: ZoomDirection): ClickHandler {
webContents.setZoomFactor(1)
webContents.send('zoom-factor-changed', 1)
} else {
webContents.getZoomFactor(rawZoom => {
const zoomFactors =
direction === ZoomDirection.In ? ZoomInFactors : ZoomOutFactors
const rawZoom = webContents.getZoomFactor()
const zoomFactors =
direction === ZoomDirection.In ? ZoomInFactors : ZoomOutFactors
// So the values that we get from getZoomFactor are floating point
// precision numbers from chromium that don't always round nicely so
// we'll have to do a little trick to figure out which of our supported
// zoom factors the value is referring to.
const currentZoom = findClosestValue(zoomFactors, rawZoom)
// So the values that we get from getZoomFactor are floating point
// precision numbers from chromium that don't always round nicely so
// we'll have to do a little trick to figure out which of our supported
// zoom factors the value is referring to.
const currentZoom = findClosestValue(zoomFactors, rawZoom)
const nextZoomLevel = zoomFactors.find(f =>
direction === ZoomDirection.In ? f > currentZoom : f < currentZoom
)
const nextZoomLevel = zoomFactors.find(f =>
direction === ZoomDirection.In ? f > currentZoom : f < currentZoom
)
// If we couldn't find a zoom level (likely due to manual manipulation
// of the zoom factor in devtools) we'll just snap to the closest valid
// factor we've got.
const newZoom =
nextZoomLevel === undefined ? currentZoom : nextZoomLevel
// If we couldn't find a zoom level (likely due to manual manipulation
// of the zoom factor in devtools) we'll just snap to the closest valid
// factor we've got.
const newZoom = nextZoomLevel === undefined ? currentZoom : nextZoomLevel
webContents.setZoomFactor(newZoom)
webContents.send('zoom-factor-changed', newZoom)
})
webContents.setZoomFactor(newZoom)
webContents.send('zoom-factor-changed', newZoom)
}
}
}
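`findClosestValue` is referenced above but isn't part of this hunk; a minimal sketch of what such a helper could look like (the actual implementation may differ):

// Sketch: pick the supported zoom factor closest to the raw floating point
// value reported by Chromium.
function findClosestValueSketch(
  arr: ReadonlyArray<number>,
  value: number
): number {
  return arr.reduce((previous, current) =>
    Math.abs(current - value) < Math.abs(previous - value) ? current : previous
  )
}

// findClosestValueSketch([0.67, 0.8, 0.9, 1, 1.1], 1.0999999) === 1.1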

View file

@ -1,6 +1,5 @@
export * from './build-default-menu'
export * from './ensure-item-ids'
export * from './menu-event'
export * from './menu-ids'
export * from './crash-menu'
export * from './get-all-menu-items'

View file

@ -1,5 +1,6 @@
export type MenuEvent =
| 'push'
| 'force-push'
| 'pull'
| 'show-changes'
| 'show-history'
@ -10,6 +11,7 @@ export type MenuEvent =
| 'create-repository'
| 'rename-branch'
| 'delete-branch'
| 'discard-all-changes'
| 'show-preferences'
| 'choose-repository'
| 'open-working-directory'
@ -30,3 +32,7 @@ export type MenuEvent =
| 'open-external-editor'
| 'select-all'
| 'show-release-notes-popup'
| 'show-stashed-changes'
| 'hide-stashed-changes'
| 'test-prune-branches'
| 'find-text'

View file

@ -18,7 +18,9 @@ export function openDirectorySafe(path: string) {
slashes: true,
})
shell.openExternal(directoryURL)
shell
.openExternal(directoryURL)
.catch(err => log.error(`Failed to open directory (${path})`, err))
} else {
shell.openItem(path)
}

View file

@ -1,7 +1,7 @@
import { getDotComAPIEndpoint, IAPIEmail } from '../lib/api'
/**
* A GitHub account, representing the user found on GitHub The Website or GitHub Enterprise.
* A GitHub account, representing the user found on GitHub The Website or GitHub Enterprise Server.
*
* This contains a token that will be used for operations that require authentication.
*/
@ -15,11 +15,11 @@ export class Account {
* Create an instance of an account
*
* @param login The login name for this account
* @param endpoint The server for this account - GitHub or a GitHub Enterprise instance
* @param endpoint The server for this account - GitHub or a GitHub Enterprise Server instance
* @param token The access token used to perform operations on behalf of this account
* @param emails The current list of email addresses associated with the account
* @param avatarURL The profile URL to render for this account
* @param id The database id for this account
* @param id The GitHub.com or GitHub Enterprise Server database id for this account.
* @param name The friendly name associated with this account
*/
public constructor(

View file

@ -162,6 +162,24 @@ function getAccessKey(text: string): string | null {
return m ? m[1] : null
}
/** Workaround for missing type information on Electron.MenuItem.type */
function parseMenuItem(
type: string
): 'normal' | 'separator' | 'submenu' | 'checkbox' | 'radio' {
switch (type) {
case 'normal':
case 'separator':
case 'submenu':
case 'checkbox':
case 'radio':
return type
default:
throw new Error(
`Unable to parse string ${type} to a valid menu item type`
)
}
}
/**
* Creates an instance of one of the types in the MenuItem type union based
* on an Electron MenuItem instance. Will recurse through all sub menus and
@ -182,8 +200,10 @@ function menuItemFromElectronMenuItem(menuItem: Electron.MenuItem): MenuItem {
const accelerator = getAccelerator(menuItem)
const accessKey = getAccessKey(menuItem.label)
const type = parseMenuItem(menuItem.type)
// normal, separator, submenu, checkbox or radio.
switch (menuItem.type) {
switch (type) {
case 'normal':
return {
id,
@ -230,10 +250,7 @@ function menuItemFromElectronMenuItem(menuItem: Electron.MenuItem): MenuItem {
accessKey,
}
default:
return assertNever(
menuItem.type,
`Unknown menu item type ${menuItem.type}`
)
return assertNever(type, `Unknown menu item type ${type}`)
}
}
/**

View file

@ -19,7 +19,7 @@ export interface IAuthor {
readonly email: string
/**
* The GitHub.com or GitHub Enterprise login for
* The GitHub.com or GitHub Enterprise Server login for
* this author or null if that information is not
* available.
*/

View file

@ -27,7 +27,7 @@ export enum StartPoint {
}
/**
* Check if a branch is eligible for beign fast forarded.
* Check if a branch is eligible for being fast-forwarded.
*
* Requirements:
* 1. It's local.

View file

@ -37,6 +37,20 @@ function extractCoAuthors(trailers: ReadonlyArray<ITrailer>) {
return coAuthors
}
/**
* A minimal shape of data to represent a commit, for situations where the
* application does not require the full commit metadata.
*
* Equivalent to the output of Git commands that support the
* `--oneline --no-abbrev-commit` arguments to format a commit.
*/
export type CommitOneLine = {
/** The full commit id associated with the commit */
readonly sha: string
/** The first line of the commit message */
readonly summary: string
}
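As a concrete example of the shape described by the doc comment, each line of `git log --oneline --no-abbrev-commit` is `<full 40-character sha> <summary>` and can be split into a `CommitOneLine` like this (a rough sketch; the app's actual parsing may differ):

// Sketch: split one '--oneline --no-abbrev-commit' line at the first space.
function parseOneLine(line: string): CommitOneLine {
  const [sha, ...rest] = line.split(' ')
  return { sha, summary: rest.join(' ') }
}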
/** A git commit. */
export class Commit {
/**

View file

@ -1,14 +1,13 @@
/**
* A state representing the app computing whether a planned action will require
* further work by the user to complete.
* An action being computed in the background on behalf of the user
*/
export enum ComputedActionKind {
export enum ComputedAction {
/** The action is being computed in the background */
Loading = 'loading',
/** The action cannot be completed, for reasons the app should explain */
Invalid = 'invalid',
/** The action should complete without any additional work required by the user */
Clean = 'clean',
/** The action requires additional work by the user to complete successfully */
Conflicts = 'conflicts',
/** The action cannot be completed, for reasons the app should explain */
Invalid = 'invalid',
}

Some files were not shown because too many files have changed in this diff.