debt - avoid deprecated Buffer ctors

Benjamin Pasero 2018-02-26 13:09:47 +01:00
parent 0c1cb4571f
commit a23edada50
25 changed files with 48 additions and 48 deletions
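For context: `new Buffer(...)` is deprecated in Node.js because its behavior depends on the type of its first argument — a number allocates that many bytes of uninitialized memory, while a string copies encoded bytes — which has caused bugs and security issues. The explicit replacements used throughout this commit are `Buffer.from` for content and `Buffer.alloc`/`Buffer.allocUnsafe` for sizes. A minimal sketch of the mapping (variable names are illustrative, not from this commit):

// Content input: Buffer.from copies the data.
const fromString = Buffer.from('hello', 'utf8'); // was: new Buffer('hello', 'utf8')
const fromBytes = Buffer.from([0x68, 0x69]);     // was: new Buffer([0x68, 0x69])

// Size input: allocate explicitly instead.
const zeroed = Buffer.alloc(1024);       // zero-filled; the safe default
const raw = Buffer.allocUnsafe(1024);    // uninitialized; only where the code overwrites it before reading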

View file

@@ -102,7 +102,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-ed
.pipe(es.through(function (data) {
var json = JSON.parse(data.contents.toString());
json.private = false;
-data.contents = new Buffer(JSON.stringify(json, null, ' '));
+data.contents = Buffer.from(JSON.stringify(json, null, ' '));
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),
@@ -142,7 +142,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-ed
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
-data.contents = new Buffer(strContents);
+data.contents = Buffer.from(strContents);
this.emit('data', data);
})).pipe(gulp.dest('out-monaco-editor-core/min')),

View file

@@ -148,7 +148,7 @@ const config = {
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
-darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
+darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,

View file

@@ -570,7 +570,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
contents.push(index < modules.length - 1 ? '\t],' : '\t]');
});
contents.push('});');
-emitter.queue(new File({ path: bundle + '.nls.' + language.id + '.js', contents: new Buffer(contents.join('\n'), 'utf-8') }));
+emitter.queue(new File({ path: bundle + '.nls.' + language.id + '.js', contents: Buffer.from(contents.join('\n'), 'utf-8') }));
});
});
Object.keys(statistics).forEach(key => {
@@ -667,7 +667,7 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({
path: filePath,
-contents: new Buffer(xlf.toString(), 'utf8')
+contents: Buffer.from(xlf.toString(), 'utf8')
});
this.queue(xlfFile);
}
@@ -738,7 +738,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
if (_xlf) {
let xlfFile = new File({
path: path.join(extensionsProject, extensionName + '.xlf'),
-contents: new Buffer(_xlf.toString(), 'utf8')
+contents: Buffer.from(_xlf.toString(), 'utf8')
});
folderStream.queue(xlfFile);
}
@@ -810,7 +810,7 @@ export function createXlfFilesForIsl(): ThroughStream {
// Emit only upon all ISL files combined into single XLF instance
const newFilePath = path.join(projectName, resourceFile);
-const xlfFile = new File({ path: newFilePath, contents: new Buffer(xlf.toString(), 'utf-8') });
+const xlfFile = new File({ path: newFilePath, contents: Buffer.from(xlf.toString(), 'utf-8') });
this.queue(xlfFile);
});
}
@@ -1174,7 +1174,7 @@ function createI18nFile(originalFilePath: string, messages: any): File {
let content = JSON.stringify(result, null, '\t').replace(/\r\n/g, '\n');
return new File({
path: path.join(originalFilePath + '.i18n.json'),
-contents: new Buffer(content, 'utf8')
+contents: Buffer.from(content, 'utf8')
});
}
@@ -1328,7 +1328,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
return new File({
path: filePath,
-contents: iconv.encode(new Buffer(content.join('\r\n'), 'utf8'), innoSetup.codePage)
+contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
});
}

View file

@@ -131,7 +131,7 @@ module nls {
export function fileFrom(file: File, contents: string, path: string = file.path) {
return new File({
-contents: new Buffer(contents),
+contents: Buffer.from(contents),
base: file.base,
cwd: file.cwd,
path: path

View file

@@ -71,7 +71,7 @@ function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWr
this.emit('data', new VinylFile({
path: 'fake',
base: '',
-contents: new Buffer(bundledFileHeader)
+contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
} else {
@@ -115,7 +115,7 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
-contents: new Buffer(source.contents)
+contents: Buffer.from(source.contents)
});
});
@@ -199,7 +199,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
-contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
+contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);

View file

@@ -190,7 +190,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
return;
}
-f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
+f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
if (err) { return cb(err); }
@@ -209,7 +209,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
-f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
+f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));

View file

@@ -9,7 +9,7 @@ const es = require('event-stream');
function handleDeletions() {
return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) {
-f.contents = new Buffer('');
+f.contents = Buffer.from('');
f.stat = { mtime: new Date() };
}

View file

@@ -239,7 +239,7 @@ export async function grep(filename: string, pattern: RegExp): Promise<boolean>
export function readBytes(stream: Readable, bytes: number): Promise<Buffer> {
return new Promise<Buffer>((complete, error) => {
let done = false;
-let buffer = new Buffer(bytes);
+let buffer = Buffer.allocUnsafe(bytes);
let bytesRead = 0;
stream.on('data', (data: Buffer) => {

View file

@@ -57,7 +57,7 @@ log('ELECTRON_RUN_AS_NODE: ' + process.env['ELECTRON_RUN_AS_NODE']);
var fsWriteSyncString = function (fd: number, str: string, _position: number, encoding?: string) {
// fs.writeSync(fd, string[, position[, encoding]]);
-var buf = new Buffer(str, encoding || 'utf8');
+var buf = Buffer.from(str, encoding || 'utf8');
return fsWriteSyncBuffer(fd, buf, 0, buf.length);
};

View file

@@ -8,28 +8,28 @@ import * as stream from 'stream';
const DefaultSize: number = 8192;
const ContentLength: string = 'Content-Length: ';
const ContentLengthSize: number = Buffer.byteLength(ContentLength, 'utf8');
-const Blank: number = new Buffer(' ', 'utf8')[0];
-const BackslashR: number = new Buffer('\r', 'utf8')[0];
-const BackslashN: number = new Buffer('\n', 'utf8')[0];
+const Blank: number = Buffer.from(' ', 'utf8')[0];
+const BackslashR: number = Buffer.from('\r', 'utf8')[0];
+const BackslashN: number = Buffer.from('\n', 'utf8')[0];
class ProtocolBuffer {
private index: number = 0;
-private buffer: Buffer = new Buffer(DefaultSize);
+private buffer: Buffer = Buffer.allocUnsafe(DefaultSize);
public append(data: string | Buffer): void {
let toAppend: Buffer | null = null;
if (Buffer.isBuffer(data)) {
toAppend = <Buffer>data;
} else {
-toAppend = new Buffer(<string>data, 'utf8');
+toAppend = Buffer.from(<string>data, 'utf8');
}
if (this.buffer.length - this.index >= toAppend.length) {
toAppend.copy(this.buffer, this.index, 0, toAppend.length);
} else {
let newSize = (Math.ceil((this.index + toAppend.length) / DefaultSize) + 1) * DefaultSize;
if (this.index === 0) {
-this.buffer = new Buffer(newSize);
+this.buffer = Buffer.allocUnsafe(newSize);
toAppend.copy(this.buffer, 0, 0, toAppend.length);
} else {
this.buffer = Buffer.concat([this.buffer.slice(0, this.index), toAppend], newSize);
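A note on the size-based replacements in the hunk above: `new Buffer(size)` is mapped to `Buffer.allocUnsafe(size)`, which likewise returns uninitialized memory. That appears safe here because `ProtocolBuffer` tracks `this.index` and only reads bytes it has written; code without that invariant should prefer the zero-filling form. A minimal sketch of the difference:

const a = Buffer.alloc(8);       // <Buffer 00 00 00 00 00 00 00 00>, always zeroed
const b = Buffer.allocUnsafe(8); // contents are arbitrary until overwritten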

View file

@@ -59,7 +59,7 @@ log('ELECTRON_RUN_AS_NODE: ' + process.env['ELECTRON_RUN_AS_NODE']);
var fsWriteSyncString = function (fd, str, position, encoding) {
// fs.writeSync(fd, string[, position[, encoding]]);
-var buf = new Buffer(str, encoding || 'utf8');
+var buf = Buffer.from(str, encoding || 'utf8');
return fsWriteSyncBuffer(fd, buf, 0, buf.length);
};

View file

@@ -38,7 +38,7 @@ export function readExactlyByFile(file: string, totalBytes: number): TPromise<Re
});
}
-const buffer = new Buffer(totalBytes);
+const buffer = Buffer.allocUnsafe(totalBytes);
let offset = 0;
function readChunk(): void {
@@ -96,7 +96,7 @@ export function readToMatchingString(file: string, matchingString: string, chunk
});
}
-let buffer = new Buffer(maximumBytesToRead);
+let buffer = Buffer.allocUnsafe(maximumBytesToRead);
let offset = 0;
function readChunk(): void {

View file

@@ -86,5 +86,5 @@ export function onError(error: Error, done: () => void): void {
}
export function toResource(this: any, path: string) {
-return URI.file(paths.join('C:\\', new Buffer(this.test.fullTitle()).toString('base64'), path));
+return URI.file(paths.join('C:\\', Buffer.from(this.test.fullTitle()).toString('base64'), path));
}

View file

@@ -12,10 +12,10 @@ suite('Decoder', () => {
test('decoding', function () {
const lineDecoder = new decoder.LineDecoder();
-let res = lineDecoder.write(new Buffer('hello'));
+let res = lineDecoder.write(Buffer.from('hello'));
assert.equal(res.length, 0);
-res = lineDecoder.write(new Buffer('\nworld'));
+res = lineDecoder.write(Buffer.from('\nworld'));
assert.equal(res[0], 'hello');
assert.equal(res.length, 1);

View file

@@ -82,7 +82,7 @@ async function postLogs(
result = await requestService.request({
url: endpoint.url,
type: 'POST',
-data: new Buffer(fs.readFileSync(outZip)).toString('base64'),
+data: Buffer.from(fs.readFileSync(outZip)).toString('base64'),
headers: {
'Content-Type': 'application/zip'
}

View file

@@ -57,7 +57,7 @@ export class ClipboardService implements IClipboardService {
}
private filesToBuffer(resources: URI[]): Buffer {
-return new Buffer(resources.map(r => r.fsPath).join('\n'));
+return Buffer.from(resources.map(r => r.fsPath).join('\n'));
}
private bufferToFiles(buffer: Buffer): URI[] {

View file

@@ -43,7 +43,7 @@ export const xhrRequest: IRequestFunction = (options: IRequestOptions): TPromise
constructor(arraybuffer: ArrayBuffer) {
super();
-this._buffer = new Buffer(new Uint8Array(arraybuffer));
+this._buffer = Buffer.from(new Uint8Array(arraybuffer));
this._offset = 0;
this._length = this._buffer.length;
}
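One subtlety in the hunk above: `Buffer.from(new Uint8Array(arraybuffer))` copies the bytes, preserving the old `new Buffer(...)` semantics, whereas `Buffer.from(arraybuffer)` would create a view that shares memory with the ArrayBuffer. A short sketch of the distinction:

const ab = new ArrayBuffer(4);
const shared = Buffer.from(ab);                 // no copy; writes are visible through both
const copied = Buffer.from(new Uint8Array(ab)); // independent copy of the bytes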

View file

@@ -21,7 +21,7 @@ export abstract class V8Protocol {
this.sequence = 1;
this.contentLength = -1;
this.pendingRequests = new Map<number, (e: DebugProtocol.Response) => void>();
-this.rawData = new Buffer(0);
+this.rawData = Buffer.allocUnsafe(0);
}
public getId(): string {

View file

@@ -20,7 +20,7 @@ import { IEditorGroupService } from 'vs/workbench/services/group/common/groupSer
import { IModelService } from 'vs/editor/common/services/modelService';
function toResource(self, path) {
-return URI.file(join('C:\\', new Buffer(self.test.fullTitle()).toString('base64'), path));
+return URI.file(join('C:\\', Buffer.from(self.test.fullTitle()).toString('base64'), path));
}
class ServiceAccessor {

View file

@@ -28,7 +28,7 @@ class TestFileEditorTracker extends FileEditorTracker {
}
function toResource(self: any, path: string) {
-return URI.file(join('C:\\', new Buffer(self.test.fullTitle()).toString('base64'), path));
+return URI.file(join('C:\\', Buffer.from(self.test.fullTitle()).toString('base64'), path));
}
class ServiceAccessor {

View file

@@ -231,7 +231,7 @@ class OutputChannelBackedByFile extends AbstractFileOutputChannel implements Out
append(message: string): void {
// update end offset always as message is read
-this.endOffset = this.endOffset + new Buffer(message).byteLength;
+this.endOffset = this.endOffset + Buffer.from(message).byteLength;
if (this.loadingFromFileInProgress) {
this.appendedMessage += message;
} else {
@@ -258,7 +258,7 @@ class OutputChannelBackedByFile extends AbstractFileOutputChannel implements Out
this.appendedMessage = '';
return this.loadFile()
.then(content => {
-if (this.endOffset !== this.startOffset + new Buffer(content).byteLength) {
+if (this.endOffset !== this.startOffset + Buffer.from(content).byteLength) {
// Queue content is not written into the file
// Flush it and load file again
this.flush();
@@ -374,7 +374,7 @@ class FileOutputChannel extends AbstractFileOutputChannel implements OutputChann
loadModel(): TPromise<ITextModel> {
return this.fileService.resolveContent(this.file, { position: this.startOffset })
.then(content => {
-this.endOffset = this.startOffset + new Buffer(content.value).byteLength;
+this.endOffset = this.startOffset + Buffer.from(content.value).byteLength;
return this.createModel(content.value);
});
}
@@ -388,7 +388,7 @@ class FileOutputChannel extends AbstractFileOutputChannel implements OutputChann
this.fileService.resolveContent(this.file, { position: this.endOffset })
.then(content => {
if (content.value) {
-this.endOffset = this.endOffset + new Buffer(content.value).byteLength;
+this.endOffset = this.endOffset + Buffer.from(content.value).byteLength;
this.appendToModel(content.value);
}
this.updateInProgress = false;

View file

@@ -35,7 +35,7 @@ function toResource(path: string) {
}
function toFileResource(self: any, path: string) {
-return URI.file(paths.join('C:\\', new Buffer(self.test.fullTitle()).toString('base64'), path));
+return URI.file(paths.join('C:\\', Buffer.from(self.test.fullTitle()).toString('base64'), path));
}
class TestEditorPart implements IEditorPart {

View file

@@ -936,7 +936,7 @@ suite('FileService', () => {
const resource = uri.file(path.join(testDir, 'some_utf8_bom.txt'));
service.resolveContent(resource).done(c => {
-assert.equal(encodingLib.detectEncodingByBOMFromBuffer(new Buffer(c.value), 512), null);
+assert.equal(encodingLib.detectEncodingByBOMFromBuffer(Buffer.from(c.value), 512), null);
done();
}, error => onError(error, done));
@@ -1097,7 +1097,7 @@ suite('FileService', () => {
test('resolveContent - from position (with umlaut)', function (done: () => void) {
const resource = uri.file(path.join(testDir, 'small_umlaut.txt'));
-service.resolveContent(resource, { position: new Buffer('Small File with Ü').length }).done(content => {
+service.resolveContent(resource, { position: Buffer.from('Small File with Ü').length }).done(content => {
assert.equal(content.value, 'mlaut');
done();
}, error => onError(error, done));

View file

@@ -169,7 +169,7 @@ export class SearchWorkerEngine {
return resolve(null);
}
-const buffer = new Buffer(options.bufferLength);
+const buffer = Buffer.allocUnsafe(options.bufferLength);
let line = '';
let lineNumber = 0;
let lastBufferHadTrailingCR = false;

View file

@@ -35,7 +35,7 @@ suite('RipgrepParser', () => {
}
function parseInputStrings(inputChunks: string[]): ISerializedFileMatch[] {
-return parseInput(inputChunks.map(chunk => new Buffer(chunk)));
+return parseInput(inputChunks.map(chunk => Buffer.from(chunk)));
}
function parseInput(inputChunks: Buffer[]): ISerializedFileMatch[] {
@@ -157,14 +157,14 @@ suite('RipgrepParser', () => {
test('Parses chunks broken in the middle of a multibyte character', () => {
const multibyteStr = '漢';
-const multibyteBuf = new Buffer(multibyteStr);
+const multibyteBuf = Buffer.from(multibyteStr);
const text = getFileLine('foo/bar') + '\n' + getMatchLine(0, ['before', 'match', 'after']) + '\n';
// Split the multibyte char into two pieces and divide between the two buffers
const beforeIndex = 24;
const inputBufs = [
-Buffer.concat([new Buffer(text.substr(0, beforeIndex)), multibyteBuf.slice(0, 2)]),
-Buffer.concat([multibyteBuf.slice(2), new Buffer(text.substr(beforeIndex))])
+Buffer.concat([Buffer.from(text.substr(0, beforeIndex)), multibyteBuf.slice(0, 2)]),
+Buffer.concat([multibyteBuf.slice(2), Buffer.from(text.substr(beforeIndex))])
];
const results = parseInput(inputBufs);