Merge branch 'development' into interop-requires

Commit 0e5f688b5a by Markus Olsson, 2020-08-03 16:17:34 +02:00 (committed by GitHub)
23 changed files with 592 additions and 109 deletions


@ -30,6 +30,7 @@
"dugite": "1.91.3",
"electron-window-state": "^5.0.3",
"event-kit": "^2.0.0",
"file-metadata": "^1.0.0",
"file-uri-to-path": "0.0.2",
"file-url": "^2.0.2",
"fs-admin": "^0.12.0",


@ -7,8 +7,31 @@ export interface IAppShell {
readonly moveItemToTrash: (path: string) => boolean
readonly beep: () => void
readonly openExternal: (path: string) => Promise<boolean>
/**
* Opens the specified file using the operating
* system's default application.
* Do not use this method with non-validated paths.
*
* @param path - The path of the file to open
*/
readonly openItem: (path: string) => boolean
/**
* Reveals the specified file in the operating system's
* default file explorer. If a folder is passed, it will
* open its parent folder and preselect the passed folder.
*
* @param path - The path of the file to show
*/
readonly showItemInFolder: (path: string) => void
/**
* Opens the specified folder in the operating
* system's default file explorer.
* Do not use this method with non-validated paths.
*
* @param path - The path of the folder to open
*/
readonly showFolderContents: (path: string) => void
}
export const shell: IAppShell = {
@ -29,6 +52,9 @@ export const shell: IAppShell = {
showItemInFolder: path => {
ipcRenderer.send('show-item-in-folder', { path })
},
showFolderContents: path => {
ipcRenderer.send('show-folder-contents', { path })
},
openItem: electronShell.openItem,
}


@ -28,6 +28,13 @@ export class IssuesDatabase extends BaseDatabase {
clearIssues
)
}
public getIssuesForRepository(gitHubRepositoryID: number) {
return this.issues
.where('gitHubRepositoryID')
.equals(gitHubRepositoryID)
.toArray()
}
}
function clearIssues(transaction: Dexie.Transaction) {


@ -225,3 +225,9 @@ declare class ResizeObserver {
public disconnect(): void
public observe(e: HTMLElement): void
}
declare module 'file-metadata' {
// eslint-disable-next-line no-restricted-syntax
function fileMetadata(path: string): Promise<plist.PlistObject>
export = fileMetadata
}


@ -0,0 +1,8 @@
/**
* Hack: The file-metadata plugin has substantial dependencies
* (plist, DOMParser, etc) and it's only applicable on macOS.
*
* Therefore, when compiling on other platforms, we replace it
* with this tiny shim. See webpack.common.ts.
*/
module.exports = () => Promise.resolve({})


@ -0,0 +1,41 @@
import getFileMetadata = require('file-metadata')
/**
* Attempts to determine if the provided path is an application bundle or not.
*
* macOS differs from the other platforms we support in that a directory can
* also be an application and therefore executable, making it unsafe to open
* directories on macOS as we could conceivably end up launching an application.
*
* This method uses file metadata (the `mdls` tool, to be exact) to
* determine whether a path is actually an application bundle or otherwise
* executable.
*
* NOTE: This method will always return false when not running on macOS.
*/
export async function isApplicationBundle(path: string): Promise<boolean> {
if (process.platform !== 'darwin') {
return false
}
const metadata = await getFileMetadata(path)
if (metadata['contentType'] === 'com.apple.application-bundle') {
return true
}
const contentTypeTree = metadata['contentTypeTree']
if (Array.isArray(contentTypeTree)) {
for (const contentType of contentTypeTree) {
switch (contentType) {
case 'com.apple.application-bundle':
case 'com.apple.application':
case 'public.executable':
return true
}
}
}
return false
}


@ -1,5 +1,6 @@
import * as Path from 'path'
import fileUrl from 'file-url'
import { realpath } from 'fs-extra'
/**
* Resolve and encode the path information into a URL.
@ -10,3 +11,154 @@ export function encodePathAsUrl(...pathSegments: string[]): string {
const path = Path.resolve(...pathSegments)
return fileUrl(path)
}
/**
* Resolve one or more path sequences into an absolute path underneath
* or at the given root path.
*
* The path segments are expected to be relative paths although
* providing an absolute path is also supported. In the case of an
* absolute path segment this method will essentially only verify
* that the absolute path is equal to or deeper in the directory
* tree than the root path.
*
* If the fully resolved path does not reside underneath the root path
* this method will return null.
*
* @param rootPath The root path. The resolved path
* is guaranteed to reside at, or underneath this
* path.
* @param pathSegments One or more paths to join with the root path
* @param options A subset of the Path module. Requires the join,
* resolve, and normalize path functions. Defaults
* to the platform-specific path functions but can
* be overridden by providing either Path.win32 or
* Path.posix.
*/
async function _resolveWithin(
rootPath: string,
pathSegments: string[],
options: {
join: (...pathSegments: string[]) => string
normalize: (p: string) => string
resolve: (...pathSegments: string[]) => string
} = Path
) {
// An empty root path would let all relative
// paths through.
if (rootPath.length === 0) {
return null
}
const { join, normalize, resolve } = options
const normalizedRoot = normalize(rootPath)
const normalizedRelative = normalize(join(...pathSegments))
// Null bytes have no place in paths.
if (
normalizedRoot.indexOf('\0') !== -1 ||
normalizedRelative.indexOf('\0') !== -1
) {
return null
}
// Resolve to an absolute path. Note that this will not contain
// any directory traversal segments.
const resolved = resolve(normalizedRoot, normalizedRelative)
const realRoot = await realpath(normalizedRoot)
const realResolved = await realpath(resolved)
return realResolved.startsWith(realRoot) ? resolved : null
}
/**
* Resolve one or more path sequences into an absolute path underneath
* or at the given root path.
*
* The path segments are expected to be relative paths although
* providing an absolute path is also supported. In the case of an
* absolute path segment this method will essentially only verify
* that the absolute path is equal to or deeper in the directory
* tree than the root path.
*
* If the fully resolved path does not reside underneath the root path
* this method will return null.
*
* This method will resolve paths using the current platform path
* structure.
*
* @param rootPath The root path. The resolved path
* is guaranteed to reside at, or underneath this
* path.
* @param pathSegments One or more paths to join with the root path
*/
export function resolveWithin(
rootPath: string,
...pathSegments: string[]
): Promise<string | null> {
return _resolveWithin(rootPath, pathSegments)
}
/**
* Resolve one or more path sequences into an absolute path underneath
* or at the given root path.
*
* The path segments are expected to be relative paths although
* providing an absolute path is also supported. In the case of an
* absolute path segment this method will essentially only verify
* that the absolute path is equal to or deeper in the directory
* tree than the root path.
*
* If the fully resolved path does not reside underneath the root path
* this method will return null.
*
* This method will resolve paths using POSIX path syntax.
*
* @param rootPath The root path. The resolved path
* is guaranteed to reside at, or underneath this
* path.
* @param pathSegments One or more paths to join with the root path
*/
export function resolveWithinPosix(
rootPath: string,
...pathSegments: string[]
): Promise<string | null> {
return _resolveWithin(rootPath, pathSegments, Path.posix)
}
/**
* Resolve one or more path sequences into an absolute path underneath
* or at the given root path.
*
* The path segments are expected to be relative paths although
* providing an absolute path is also supported. In the case of an
* absolute path segment this method will essentially only verify
* that the absolute path is equal to or deeper in the directory
* tree than the root path.
*
* If the fully resolved path does not reside underneath the root path
* this method will return null.
*
* This method will resolve paths using Windows path syntax.
*
* @param rootPath The root path. The resolved path
* is guaranteed to reside at, or underneath this
* path.
* @param pathSegments One or more paths to join with the root path
*/
export function resolveWithinWin32(
rootPath: string,
...pathSegments: string[]
): Promise<string | null> {
return _resolveWithin(rootPath, pathSegments, Path.win32)
}
export const win32 = {
resolveWithin: resolveWithinWin32,
}
export const posix = {
resolveWithin: resolveWithinPosix,
}


@ -9,6 +9,7 @@ import {
import { compare } from '../compare'
import { BaseStore } from './base-store'
import { getStealthEmailForUser, getLegacyStealthEmailForUser } from '../email'
import { DefaultMaxHits } from '../../ui/autocompletion/common'
/** Don't fetch mentionables more often than every 10 minutes */
const MaxFetchFrequency = 10 * 60 * 1000
@ -116,10 +117,7 @@ export class GitHubUserStore extends BaseStore {
response.etag
)
if (
this.queryCache !== null &&
this.queryCache.repository.dbID === repository.dbID
) {
if (this.queryCache?.repository.dbID === repository.dbID) {
this.queryCache = null
this.clearCachePruneTimeout()
}
@ -149,7 +147,7 @@ export class GitHubUserStore extends BaseStore {
public async getMentionableUsersMatching(
repository: GitHubRepository,
query: string,
maxHits: number = 100
maxHits: number = DefaultMaxHits
): Promise<ReadonlyArray<IMentionableUser>> {
assertPersisted(repository)
@ -164,8 +162,7 @@ export class GitHubUserStore extends BaseStore {
const needle = query.toLowerCase()
// Simple substring comparison on login and real name
for (let i = 0; i < users.length && hits.length < maxHits; i++) {
const user = users[i]
for (const user of users) {
const ix = `${user.login} ${user.name}`
.trim()
.toLowerCase()
@ -185,6 +182,7 @@ export class GitHubUserStore extends BaseStore {
.sort(
(x, y) => compare(x.ix, y.ix) || compare(x.user.login, y.user.login)
)
.slice(0, maxHits)
.map(h => h.user)
}


@ -2,14 +2,34 @@ import { IssuesDatabase, IIssue } from '../databases/issues-database'
import { API, IAPIIssue } from '../api'
import { Account } from '../../models/account'
import { GitHubRepository } from '../../models/github-repository'
import { fatalError } from '../fatal-error'
import { compare, compareDescending } from '../compare'
import { DefaultMaxHits } from '../../ui/autocompletion/common'
/** The hard limit on the number of issue results we'd ever return. */
const IssueResultsHardLimit = 100
/** An autocompletion hit for an issue. */
export interface IIssueHit {
/** The title of the issue. */
readonly title: string
/** The issue's number. */
readonly number: number
}
/**
* The max time (in milliseconds) that we'll keep an issues query
* cache around before pruning it.
*/
const QueryCacheTimeout = 60 * 1000
interface IQueryCache {
readonly repository: GitHubRepository
readonly issues: ReadonlyArray<IIssueHit>
}
/** The store for GitHub issues. */
export class IssuesStore {
private db: IssuesDatabase
private queryCache: IQueryCache | null = null
private pruneQueryCacheTimeoutId: number | null = null
/** Initialize the store with the given database. */
public constructor(db: IssuesDatabase) {
@ -24,18 +44,13 @@ export class IssuesStore {
private async getLatestUpdatedAt(
repository: GitHubRepository
): Promise<Date | null> {
const gitHubRepositoryID = repository.dbID
if (!gitHubRepositoryID) {
return fatalError(
"Cannot get issues for a repository that hasn't been inserted into the database!"
)
}
assertPersisted(repository)
const db = this.db
const latestUpdatedIssue = await db.issues
.where('[gitHubRepositoryID+updated_at]')
.between([gitHubRepositoryID], [gitHubRepositoryID + 1], true, false)
.between([repository.dbID], [repository.dbID + 1], true, false)
.last()
if (!latestUpdatedIssue || !latestUpdatedIssue.updated_at) {
@ -79,19 +94,14 @@ export class IssuesStore {
issues: ReadonlyArray<IAPIIssue>,
repository: GitHubRepository
): Promise<void> {
const gitHubRepositoryID = repository.dbID
if (!gitHubRepositoryID) {
fatalError(
`Cannot store issues for a repository that hasn't been inserted into the database!`
)
}
assertPersisted(repository)
const issuesToDelete = issues.filter(i => i.state === 'closed')
const issuesToUpsert = issues
.filter(i => i.state === 'open')
.map<IIssue>(i => {
return {
gitHubRepositoryID,
gitHubRepositoryID: repository.dbID,
number: i.number,
title: i.title,
updated_at: i.updated_at,
@ -114,7 +124,7 @@ export class IssuesStore {
await this.db.transaction('rw', this.db.issues, async () => {
for (const issue of issuesToDelete) {
const existing = await findIssueInRepositoryByNumber(
gitHubRepositoryID,
repository.dbID,
issue.number
)
if (existing) {
@ -124,7 +134,7 @@ export class IssuesStore {
for (const issue of issuesToUpsert) {
const existing = await findIssueInRepositoryByNumber(
gitHubRepositoryID,
repository.dbID,
issue.number
)
if (existing) {
@ -134,50 +144,90 @@ export class IssuesStore {
}
}
})
if (this.queryCache?.repository.dbID === repository.dbID) {
this.queryCache = null
this.clearCachePruneTimeout()
}
}
private async getAllIssueHitsFor(repository: GitHubRepository) {
assertPersisted(repository)
const hits = await this.db.getIssuesForRepository(repository.dbID)
return hits.map(i => ({ number: i.number, title: i.title }))
}
/** Get issues whose title or number matches the text. */
public async getIssuesMatching(
repository: GitHubRepository,
text: string
): Promise<ReadonlyArray<IIssue>> {
const gitHubRepositoryID = repository.dbID
if (!gitHubRepositoryID) {
fatalError(
"Cannot get issues for a repository that hasn't been inserted into the database!"
)
}
text: string,
maxHits = DefaultMaxHits
): Promise<ReadonlyArray<IIssueHit>> {
assertPersisted(repository)
const issues =
this.queryCache?.repository.dbID === repository.dbID
? this.queryCache?.issues
: await this.getAllIssueHitsFor(repository)
this.setQueryCache(repository, issues)
if (!text.length) {
const issues = await this.db.issues
.where('gitHubRepositoryID')
.equals(gitHubRepositoryID)
.limit(IssueResultsHardLimit)
.reverse()
.sortBy('number')
return issues
.slice()
.sort((x, y) => compareDescending(x.number, y.number))
.slice(0, maxHits)
}
const MaxScore = 1
const score = (i: IIssue) => {
if (i.number.toString().startsWith(text)) {
return MaxScore
}
const hits = []
const needle = text.toLowerCase()
if (i.title.toLowerCase().includes(text.toLowerCase())) {
return MaxScore - 0.1
}
for (const issue of issues) {
const ix = `${issue.number} ${issue.title}`
.trim()
.toLowerCase()
.indexOf(needle)
return 0
if (ix >= 0) {
hits.push({ hit: { number: issue.number, title: issue.title }, ix })
}
}
const issuesCollection = await this.db.issues
.where('gitHubRepositoryID')
.equals(gitHubRepositoryID)
.filter(i => score(i) > 0)
// Sort hits primarily based on how early in the text the match
// was found and secondarily in alphabetical order.
return hits
.sort((x, y) => compare(x.ix, y.ix) || compare(x.hit.title, y.hit.title))
.slice(0, maxHits)
.map(h => h.hit)
}
const issues = await issuesCollection.limit(IssueResultsHardLimit).toArray()
private setQueryCache(
repository: GitHubRepository,
issues: ReadonlyArray<IIssueHit>
) {
this.clearCachePruneTimeout()
this.queryCache = { repository, issues }
this.pruneQueryCacheTimeoutId = window.setTimeout(() => {
this.pruneQueryCacheTimeoutId = null
this.queryCache = null
}, QueryCacheTimeout)
}
return issues.sort((a, b) => score(b) - score(a))
private clearCachePruneTimeout() {
if (this.pruneQueryCacheTimeoutId !== null) {
clearTimeout(this.pruneQueryCacheTimeoutId)
this.pruneQueryCacheTimeoutId = null
}
}
}
function assertPersisted(
repo: GitHubRepository
): asserts repo is GitHubRepository & { dbID: number } {
if (repo.dbID === null) {
throw new Error(
`Need a GitHubRepository that's been inserted into the database`
)
}
}


@ -16,7 +16,7 @@ import { fatalError } from '../lib/fatal-error'
import { IMenuItemState } from '../lib/menu-update'
import { LogLevel } from '../lib/logging/log-level'
import { log as writeLog } from './log'
import { openDirectorySafe } from './shell'
import { UNSAFE_openDirectory } from './shell'
import { reportError } from './exception-reporting'
import {
enableSourceMaps,
@ -27,6 +27,8 @@ import { showUncaughtException } from './show-uncaught-exception'
import { ISerializableMenuItem } from '../lib/menu-item'
import { buildContextMenu } from './menu/build-context-menu'
import { sendNonFatalException } from '../lib/helpers/non-fatal-exception'
import { stat } from 'fs-extra'
import { isApplicationBundle } from '../lib/is-application-bundle'
app.setAppLogsPath()
enableSourceMaps()
@ -546,20 +548,66 @@ app.on('ready', () => {
ipcMain.on(
'show-item-in-folder',
(event: Electron.IpcMainEvent, { path }: { path: string }) => {
Fs.stat(path, (err, stats) => {
Fs.stat(path, err => {
if (err) {
log.error(`Unable to find file at '${path}'`, err)
return
}
if (!__DARWIN__ && stats.isDirectory()) {
openDirectorySafe(path)
} else {
shell.showItemInFolder(path)
}
shell.showItemInFolder(path)
})
}
)
ipcMain.on(
'show-folder-contents',
async (event: Electron.IpcMainEvent, { path }: { path: string }) => {
const stats = await stat(path).catch(err => {
log.error(`Unable to retrieve file information for ${path}`, err)
return null
})
if (!stats) {
return
}
if (!stats.isDirectory()) {
log.error(
`Trying to get the folder contents of a non-folder at '${path}'`
)
shell.showItemInFolder(path)
return
}
// On Windows and Linux we can count on a directory being just a
// directory.
if (!__DARWIN__) {
UNSAFE_openDirectory(path)
return
}
// On macOS a directory might also be an app bundle and if it is,
// attempting to open it would execute that app, which is far from
// ideal, so we'll look up the metadata for the path and attempt to
// determine whether it's an app bundle or not.
//
// If we fail loading the metadata we'll assume it's an app bundle
// out of an abundance of caution.
const isBundle = await isApplicationBundle(path).catch(err => {
log.error(`Failed to load metadata for path '${path}'`, err)
return true
})
if (isBundle) {
log.info(
`Preventing direct open of path '${path}' as it appears to be an application bundle`
)
shell.showItemInFolder(path)
} else {
UNSAFE_openDirectory(path)
}
}
)
})
app.on('activate', () => {


@ -4,7 +4,7 @@ import { MenuEvent } from './menu-event'
import { truncateWithEllipsis } from '../../lib/truncate-with-ellipsis'
import { getLogDirectoryPath } from '../../lib/logging/get-log-path'
import { ensureDir } from 'fs-extra'
import { openDirectorySafe } from '../shell'
import { UNSAFE_openDirectory } from '../shell'
import { enableCreateGitHubIssueFromMenu } from '../../lib/feature-flag'
import { MenuLabelsEvent } from '../../models/menu-labels'
import { DefaultEditorLabel } from '../../ui/lib/context-menu'
@ -495,7 +495,7 @@ export function buildDefaultMenu({
const logPath = getLogDirectoryPath()
ensureDir(logPath)
.then(() => {
openDirectorySafe(logPath)
UNSAFE_openDirectory(logPath)
})
.catch(err => {
log.error('Failed opening logs directory', err)


@ -8,9 +8,14 @@ import { shell } from 'electron'
* window, which may confuse users. As a workaround, we will fall back to using
* shell.openExternal for macOS until it can be fixed upstream.
*
* CAUTION: This method should never be used to open user-provided or derived
* paths. Its sole use is to open _directories_ that we know to be safe; no
* verification is performed to ensure that the provided path isn't actually
* an executable.
*
* @param path directory to open
*/
export function openDirectorySafe(path: string) {
export function UNSAFE_openDirectory(path: string) {
if (__DARWIN__) {
const directoryURL = Url.format({
pathname: path,


@ -2269,7 +2269,7 @@ export class App extends React.Component<IAppProps, IAppState> {
return
}
shell.showItemInFolder(repository.path)
shell.showFolderContents(repository.path)
}
private onRepositoryDropdownStateChanged = (newState: DropdownState) => {


@ -0,0 +1,5 @@
/**
* The default maximum number of hits to return from
* either of the autocompletion providers.
*/
export const DefaultMaxHits = 25


@ -1,6 +1,7 @@
import * as React from 'react'
import { IAutocompletionProvider } from './index'
import { compare } from '../../lib/compare'
import { DefaultMaxHits } from './common'
/**
* Interface describing an autocomplete match for the given search
@ -29,7 +30,7 @@ export class EmojiAutocompletionProvider
implements IAutocompletionProvider<IEmojiHit> {
public readonly kind = 'emoji'
private emoji: Map<string, string>
private readonly emoji: Map<string, string>
public constructor(emoji: Map<string, string>) {
this.emoji = emoji
@ -40,15 +41,16 @@ export class EmojiAutocompletionProvider
}
public async getAutocompletionItems(
text: string
text: string,
maxHits = DefaultMaxHits
): Promise<ReadonlyArray<IEmojiHit>> {
// Empty strings is falsy, this is the happy path to avoid
// sorting and matching when the user types a ':'. We want
// to open the popup with suggestions as fast as possible.
if (!text) {
return Array.from(this.emoji.keys()).map<IEmojiHit>(emoji => {
return { emoji: emoji, matchStart: 0, matchLength: 0 }
})
// This is the happy path to avoid sorting and matching
// when the user types a ':'. We want to open the popup
// with suggestions as fast as possible.
if (text.length === 0) {
return [...this.emoji.keys()]
.map(emoji => ({ emoji, matchStart: 0, matchLength: 0 }))
.slice(0, maxHits)
}
const results = new Array<IEmojiHit>()
@ -72,12 +74,14 @@ export class EmojiAutocompletionProvider
//
// If both those start and length are equal we sort
// alphabetically
return results.sort(
(x, y) =>
compare(x.matchStart, y.matchStart) ||
compare(x.emoji.length, y.emoji.length) ||
compare(x.emoji, y.emoji)
)
return results
.sort(
(x, y) =>
compare(x.matchStart, y.matchStart) ||
compare(x.emoji.length, y.emoji.length) ||
compare(x.emoji, y.emoji)
)
.slice(0, maxHits)
}
public renderItem(hit: IEmojiHit) {


@ -1,6 +1,6 @@
import * as React from 'react'
import { IAutocompletionProvider } from './index'
import { IssuesStore } from '../../lib/stores'
import { IssuesStore, IIssueHit } from '../../lib/stores/issues-store'
import { Dispatcher } from '../dispatcher'
import { GitHubRepository } from '../../models/github-repository'
import { ThrottledScheduler } from '../lib/throttled-scheduler'
@ -8,15 +8,6 @@ import { ThrottledScheduler } from '../lib/throttled-scheduler'
/** The interval we should use to throttle the issues update. */
const UpdateIssuesThrottleInterval = 1000 * 60
/** An autocompletion hit for an issue. */
export interface IIssueHit {
/** The title of the issue. */
readonly title: string
/** The issue's number. */
readonly number: number
}
/** The autocompletion provider for issues in a GitHub repository. */
export class IssuesAutocompletionProvider
implements IAutocompletionProvider<IIssueHit> {


@ -1,6 +1,5 @@
import { remote } from 'electron'
import { Disposable, IDisposable } from 'event-kit'
import * as Path from 'path'
import { IAPIOrganization, IAPIRefStatus, IAPIRepository } from '../../lib/api'
import { shell } from '../../lib/app-shell'
@ -97,6 +96,7 @@ import { RebaseFlowStep, RebaseStep } from '../../models/rebase-flow-step'
import { IStashEntry } from '../../models/stash-entry'
import { WorkflowPreferences } from '../../models/workflow-preferences'
import { enableForkSettings } from '../../lib/feature-flag'
import { resolveWithin } from '../../lib/path'
/**
* An error handler function.
@ -1802,10 +1802,15 @@ export class Dispatcher {
}
if (filepath != null) {
const fullPath = Path.join(repository.path, filepath)
// because Windows uses different path separators here
const normalized = Path.normalize(fullPath)
shell.showItemInFolder(normalized)
const resolved = await resolveWithin(repository.path, filepath)
if (resolved !== null) {
shell.showItemInFolder(resolved)
} else {
log.error(
`Prevented attempt to open path outside of the repository root: ${filepath}`
)
}
}
}


@ -11,6 +11,7 @@ export const shell: IAppShell = {
},
beep: () => {},
showItemInFolder: (path: string) => {},
showFolderContents: (path: string) => {},
openExternal: (path: string) => {
return Promise.resolve(true)
},


@ -1,4 +1,9 @@
import { encodePathAsUrl } from '../../src/lib/path'
import { encodePathAsUrl, resolveWithin } from '../../src/lib/path'
import { resolve, basename, join } from 'path'
import { promises } from 'fs'
import { tmpdir } from 'os'
const { rmdir, mkdtemp, symlink, unlink } = promises
describe('path', () => {
describe('encodePathAsUrl', () => {
@ -27,4 +32,65 @@ describe('path', () => {
})
}
})
describe('resolveWithin', async () => {
const root = process.cwd()
it('fails for paths outside of the root', async () => {
expect(await resolveWithin(root, join('..'))).toBeNull()
expect(await resolveWithin(root, join('..', '..'))).toBeNull()
})
it('succeeds for paths that traverse out, and then back into, the root', async () => {
expect(await resolveWithin(root, join('..', basename(root)))).toEqual(
root
)
})
it('fails for paths containing null bytes', async () => {
expect(await resolveWithin(root, 'foo\0bar')).toBeNull()
})
it('succeeds for absolute relative paths as long as they stay within the root', async () => {
const parent = resolve(root, '..')
expect(await resolveWithin(parent, root)).toEqual(root)
})
if (!__WIN32__) {
it('fails for paths that use a symlink to traverse outside of the root', async () => {
const tempDir = await mkdtemp(join(tmpdir(), 'path-test'))
const symlinkName = 'dangerzone'
const symlinkPath = join(tempDir, symlinkName)
try {
await symlink(resolve(tempDir, '..', '..'), symlinkPath)
expect(await resolveWithin(tempDir, symlinkName)).toBeNull()
} finally {
await unlink(symlinkPath)
await rmdir(tempDir)
}
})
it('succeeds for paths that use a symlink to traverse outside of the root and then back again', async () => {
const tempDir = await mkdtemp(join(tmpdir(), 'path-test'))
const symlinkName = 'dangerzone'
const symlinkPath = join(tempDir, symlinkName)
try {
await symlink(resolve(tempDir, '..', '..'), symlinkPath)
const throughSymlinkPath = join(
symlinkName,
basename(resolve(tempDir, '..')),
basename(tempDir)
)
expect(await resolveWithin(tempDir, throughSymlinkPath)).toBe(
resolve(tempDir, throughSymlinkPath)
)
} finally {
await unlink(symlinkPath)
await rmdir(tempDir)
}
})
}
})
})


@ -67,17 +67,41 @@ const commonConfig: webpack.Configuration = {
},
}
export const main = merge({}, commonConfig, {
entry: { main: path.resolve(__dirname, 'src/main-process/main') },
target: 'electron-main',
plugins: [
new webpack.DefinePlugin(
Object.assign({}, replacements, {
__PROCESS_KIND__: JSON.stringify('main'),
})
),
],
})
// Hack: The file-metadata plugin has substantial dependencies
// (plist, DOMParser, etc) and it's only applicable on macOS.
//
// Therefore, when compiling on other platforms, we replace it
// with a tiny shim instead.
const shimFileMetadata = {
resolve: {
alias: {
'file-metadata': path.resolve(
__dirname,
'src',
'lib',
'helpers',
'file-metadata.js'
),
},
},
}
export const main = merge(
{},
commonConfig,
{
entry: { main: path.resolve(__dirname, 'src/main-process/main') },
target: 'electron-main',
plugins: [
new webpack.DefinePlugin(
Object.assign({}, replacements, {
__PROCESS_KIND__: JSON.stringify('main'),
})
),
],
},
process.platform !== 'darwin' ? shimFileMetadata : {}
)
export const renderer = merge({}, commonConfig, {
entry: { renderer: path.resolve(__dirname, 'src/ui/index') },


@ -96,6 +96,11 @@ balanced-match@^1.0.0:
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
base64-js@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.0.tgz#a39992d723584811982be5e290bb6a53d86700f1"
integrity sha1-o5mS1yNYSBGYK+XikLtqU9hnAPE=
bl@^1.0.0:
version "1.2.2"
resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.2.tgz#a160911717103c07410cef63ef51b397c025af9c"
@ -489,6 +494,13 @@ fbjs@^0.8.16, fbjs@^0.8.4:
setimmediate "^1.0.5"
ua-parser-js "^0.7.9"
file-metadata@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/file-metadata/-/file-metadata-1.0.0.tgz#fb3f063667d1fa80e9b6594a9c0b6557d1a0c015"
integrity sha512-ipgdCeX/rx+ar60f3lMYy6dPDaxhYou442tEXn0OrHxX23vD8ABvVUjKal6+h9bBHkgjFMs57Cmc68O0zGAtKQ==
dependencies:
plist "^2.1.0"
file-uri-to-path@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-0.0.2.tgz#37cdd1b5b905404b3f05e1b23645be694ff70f82"
@ -1017,6 +1029,15 @@ path-key@^1.0.0:
resolved "https://registry.yarnpkg.com/path-key/-/path-key-1.0.0.tgz#5d53d578019646c0d68800db4e146e6bdc2ac7af"
integrity sha1-XVPVeAGWRsDWiADbThRua9wqx68=
plist@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/plist/-/plist-2.1.0.tgz#57ccdb7a0821df21831217a3cad54e3e146a1025"
integrity sha1-V8zbeggh3yGDEhejytVOPhRqECU=
dependencies:
base64-js "1.2.0"
xmlbuilder "8.2.2"
xmldom "0.1.x"
prebuild-install@5.3.3:
version "5.3.3"
resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-5.3.3.tgz#ef4052baac60d465f5ba6bf003c9c1de79b9da8e"
@ -1659,6 +1680,16 @@ wrappy@1:
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
xmlbuilder@8.2.2:
version "8.2.2"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-8.2.2.tgz#69248673410b4ba42e1a6136551d2922335aa773"
integrity sha1-aSSGc0ELS6QuGmE2VR0pIjNap3M=
xmldom@0.1.x:
version "0.1.31"
resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.31.tgz#b76c9a1bd9f0a9737e5a72dc37231cf38375e2ff"
integrity sha512-yS2uJflVQs6n+CyjHoaBmVSqIDevTAWrzMmjG1Gc7h1qQ7uVozNhEPJAwZXWyGQ/Gafo3fCwrcaokezLPupVyQ==
xtend@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"


@ -56,6 +56,7 @@
},
"dependencies": {
"@primer/octicons": "^9.1.0",
"@types/plist": "^3.0.2",
"@typescript-eslint/eslint-plugin": "3.3.0",
"@typescript-eslint/parser": "3.3.0",
"airbnb-browser-shims": "^3.0.0",


@ -490,6 +490,14 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.24.tgz#d4606afd8cf6c609036b854360367d1b2c78931f"
integrity sha512-1Ciqv9pqwVtW6FsIUKSZNB82E5Cu1I2bBTj1xuIHXLe/1zYLl3956Nbhg2MzSYHVfl9/rmanjbQIb7LibfCnug==
"@types/plist@^3.0.2":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@types/plist/-/plist-3.0.2.tgz#61b3727bba0f5c462fe333542534a0c3e19ccb01"
integrity sha512-ULqvZNGMv0zRFvqn8/4LSPtnmN4MfhlPNtJCTpKuIIxGVGZ2rYWzFXrvEBoh9CVyqSE7D6YFRJ1hydLHI6kbWw==
dependencies:
"@types/node" "*"
xmlbuilder ">=11.0.1"
"@types/prettier@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.0.1.tgz#b6e98083f13faa1e5231bfa3bdb1b0feff536b6d"
@ -11084,6 +11092,11 @@ xmlbuilder@4.2.1, xmlbuilder@^4.1.0:
dependencies:
lodash "^4.0.0"
xmlbuilder@>=11.0.1:
version "15.1.1"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-15.1.1.tgz#9dcdce49eea66d8d10b42cae94a79c3c8d0c2ec5"
integrity sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==
xmlbuilder@^9.0.7:
version "9.0.7"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"