Fix some ESLint errors

Bill 2021-02-17 00:30:41 -05:00
parent bfe731d286
commit 15d99a58fa
18 changed files with 228 additions and 131 deletions

View File

@ -0,0 +1,39 @@
TypeError: NitroBundle_1.default is not a constructor
TypeError: NitroBundle_1.default is not a constructor
TypeError: NitroBundle_1.default is not a constructor
TypeError: NitroBundle_1.default is not a constructor
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
TypeError: zlib_1.gzip.__promisify__ is not a function
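The error.log committed above records two runtime failures that line up with changes elsewhere in this commit. The "NitroBundle_1.default is not a constructor" entries are what compiled TypeScript throws when a default import is kept after NitroBundle becomes a named-only export (the converter files below switch to the named import for this reason); the "zlib_1.gzip.__promisify__ is not a function" entries come from the NitroBundle change near the end of this diff (a sketch of a fix follows that hunk). A hypothetical minimal reproduction of the first error, assuming CommonJS output:

// utils/NitroBundle.ts after this commit only has a named export:
//     export class NitroBundle { ... }
// A stale default import still compiles to new NitroBundle_1.default(),
// and NitroBundle_1.default is undefined at runtime:
import NitroBundle from '../../utils/NitroBundle'; // stale default import
// import { NitroBundle } from '../../utils/NitroBundle'; // the form this commit moves to

const bundle = new NitroBundle(); // TypeError: NitroBundle_1.default is not a constructor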

View File

@ -53,7 +53,7 @@ export class BundleProvider
}
if(isProhibited) continue;
BundleProvider.imageSource.set(names[i].substring(habboAssetSWF.getDocumentClass().length + 1), imageTag.className.substring(habboAssetSWF.getDocumentClass().length + 1));
images.push({

View File

@ -24,7 +24,7 @@ export class SWFConverter
const binaryData = SWFConverter.getBinaryData(habboAssetSWF, paletteName, false);
if(!binaryData || !binaryData.binaryDataBuffer) return null;
const byteBuffer = wrap(binaryData.binaryDataBuffer);
const paletteColors: [ number, number, number ][] = [];
@ -69,7 +69,7 @@ export class SWFConverter
const binaryData = SWFConverter.getBinaryData(habboAssetSWF, 'assets', true);
if(!binaryData) return null;
return await parseStringPromise(binaryData.binaryData);
}
@ -78,7 +78,7 @@ export class SWFConverter
const binaryData = SWFConverter.getBinaryData(habboAssetSWF, 'logic', true);
if(!binaryData) return null;
return await parseStringPromise(binaryData.binaryData);
}
@ -87,7 +87,7 @@ export class SWFConverter
const binaryData = SWFConverter.getBinaryData(habboAssetSWF, 'index', false);
if(!binaryData) return null;
return await parseStringPromise(binaryData.binaryData);
}
@ -96,7 +96,7 @@ export class SWFConverter
const binaryData = SWFConverter.getBinaryData(habboAssetSWF, 'visualization', true);
if(!binaryData) return null;
return await parseStringPromise(binaryData.binaryData);
}
}
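Each of the hunks above hands the SWF's embedded XML block (assets, logic, index, visualization) to parseStringPromise and returns the parsed result. A minimal sketch of that call, assuming parseStringPromise is the promise-based parser from xml2js (the import sits outside these hunks):

import { parseStringPromise } from 'xml2js';

// Resolves with a plain object keyed by the XML root element name
async function parseBinaryXml(xml: string): Promise<any>
{
    return await parseStringPromise(xml);
}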

View File

@ -10,7 +10,7 @@ import { AssetMapper, IndexMapper, LogicMapper, VisualizationMapper } from '../.
import { HabboAssetSWF } from '../../swf/HabboAssetSWF';
import File from '../../utils/File';
import Logger from '../../utils/Logger';
import NitroBundle from '../../utils/NitroBundle';
import { NitroBundle } from '../../utils/NitroBundle';
import { FurnitureDownloader } from './FurnitureDownloader';
@singleton()

View File

@ -49,7 +49,7 @@ export class FurnitureDownloader
count ++;
}
catch(error)
catch (error)
{
console.log();
console.error(error);
@ -76,7 +76,7 @@ export class FurnitureDownloader
await this.extractFurniture(revision, className, callback);
}
catch(error)
catch (error)
{
console.log();
console.error(error);
@ -96,7 +96,7 @@ export class FurnitureDownloader
public async parseFurniData(): Promise<IFurnitureData>
{
const url = this._configuration.getValue('furnidata.url');
if(!url || !url.length) return null;
try
@ -108,7 +108,7 @@ export class FurnitureDownloader
return (JSON.parse(content) as IFurnitureData);
}
catch(error)
catch (error)
{
console.log();
console.error(error);
@ -129,7 +129,7 @@ export class FurnitureDownloader
const buffer = await FileUtilities.readFileAsBuffer(url);
if(!buffer) return;
const newHabboAssetSWF = new HabboAssetSWF(buffer);
await newHabboAssetSWF.setupAsync();

View File

@ -10,7 +10,7 @@ import { AssetMapper, IndexMapper, LogicMapper, VisualizationMapper } from '../.
import { HabboAssetSWF } from '../../swf/HabboAssetSWF';
import File from '../../utils/File';
import Logger from '../../utils/Logger';
import NitroBundle from '../../utils/NitroBundle';
import { NitroBundle } from '../../utils/NitroBundle';
import { PetDownloader } from './PetDownloader';
@singleton()

View File

@ -30,7 +30,7 @@ export class PetDownloader
await this.extractPet(petType, callback);
}
catch(error)
catch (error)
{
console.log();
console.error(error);
@ -54,14 +54,14 @@ export class PetDownloader
const petTypes: string[] = [];
const pets = this._config.getValue('pet.configuration');
if(pets)
{
const types = pets.split(',');
for(const type of types) petTypes.push(type);
}
return petTypes;
}

View File

@ -42,7 +42,7 @@ export class AssetMapper extends Mapper
if(assetXML.name !== undefined)
{
let isProhibited = false;
for(const size of AssetMapper.PROHIBITED_SIZES)
{
if(assetXML.name.indexOf(('_' + size + '_')) >= 0)
@ -73,7 +73,7 @@ export class AssetMapper extends Mapper
asset.source = BundleProvider.imageSource.get(assetXML.name) as string;
}
}
if(assetXML.x !== undefined) asset.x = assetXML.x;
if(assetXML.y !== undefined) asset.y = assetXML.y;
if(assetXML.flipH !== undefined) asset.flipH = assetXML.flipH;

View File

@ -32,7 +32,7 @@ export class VisualizationMapper extends Mapper
if(visualizationDataXML.size !== undefined)
{
let isProhibited = false;
for(const size of VisualizationMapper.PROHIBITED_SIZES)
{
if(visualizationDataXML.size === parseInt(size))
@ -45,7 +45,7 @@ export class VisualizationMapper extends Mapper
if(isProhibited) continue;
}
const visualizationData: IAssetVisualizationData = {};
if(visualizationDataXML.angle !== undefined) visualizationData.angle = visualizationDataXML.angle;
@ -197,7 +197,7 @@ export class VisualizationMapper extends Mapper
private static mapVisualizationAnimationXML(xml: AnimationXML[], output: { [index: string]: IAssetAnimation }): void
{
if(!xml || !xml.length) return;
for(const animationXML of xml)
{
const animation: IAssetAnimation = {};
@ -313,7 +313,7 @@ export class VisualizationMapper extends Mapper
{
if(!xml || !xml.length) return;
let i = 0;
const i = 0;
for(const offsetXML of xml)
{

View File

@ -49,13 +49,13 @@ export class FurnitureTypeXML
if(xml.partcolors)
{
for(let key in xml.partcolors)
for(const key in xml.partcolors)
{
const colorData = xml.partcolors[key].color;
if(colorData)
{
for(let color of colorData)
for(const color of colorData)
{
let code = color;

View File

@ -41,8 +41,8 @@ export class HabboAssetSWF
console.log(tag);
break;
case 35:
const jpegTag: any = await readImagesJPEG(35, tag);
case 35: {
const jpegTag = await readImagesJPEG(35, tag);
this._tags.push(new ImageTag({
code: jpegTag.code,
characterID: jpegTag.characterId,
@ -52,8 +52,9 @@ export class HabboAssetSWF
bitmapHeight: jpegTag.bitmapHeight
}));
break;
}
case 36:
case 36: {
const pngTag: any = await readImagesDefineBitsLossless(tag);
this._tags.push(new ImageTag({
code: pngTag.code,
@ -64,6 +65,7 @@ export class HabboAssetSWF
bitmapHeight: pngTag.bitmapHeight
}));
break;
}
case 20:
console.log(tag);
@ -103,9 +105,11 @@ export class HabboAssetSWF
let iterator: CustomIterator<ITag> = new CustomIterator(this._tags);
// eslint-disable-next-line no-constant-condition
while(true)
{
let t: ITag;
do
{
if(!iterator.hasNext())
@ -184,6 +188,7 @@ export class HabboAssetSWF
const iterator: CustomIterator<ITag> = new CustomIterator(this._tags);
// eslint-disable-next-line no-constant-condition
while(true)
{
let t: ITag;
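The case 35 and case 36 hunks above wrap each case body in its own block so the const jpegTag and const pngTag declarations stay scoped to their case, which is what ESLint's no-case-declarations rule requires. A small self-contained illustration of the same pattern (hypothetical labels, not project code):

function describeImageTag(code: number): string
{
    switch(code)
    {
        case 35: {
            const label = 'jpeg image tag'; // block-scoped, cannot collide with other cases
            return label;
        }
        case 36: {
            const label = 'lossless image tag';
            return label;
        }
        default:
            return 'unhandled tag ' + code;
    }
}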

View File

@ -39,6 +39,7 @@ export default class CustomIterator<TType>
typeof candidate === 'object' &&
typeof candidate.length === 'number' &&
typeof candidate.splice === 'function' &&
// eslint-disable-next-line no-prototype-builtins
!(candidate.propertyIsEnumerable('length'));
}
@ -63,4 +64,4 @@ export default class CustomIterator<TType>
{
return this.arr ? this.idx <= this.top : this.idx <= this.keys.length;
}
}
}
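The eslint-disable comment added above suppresses no-prototype-builtins for the direct propertyIsEnumerable call. The alternative that rule usually suggests is invoking the builtin through Object.prototype, sketched here against the same check:

function isArrayLike(candidate: any): boolean
{
    return typeof candidate === 'object' &&
        typeof candidate.length === 'number' &&
        typeof candidate.splice === 'function' &&
        !Object.prototype.propertyIsEnumerable.call(candidate, 'length');
}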

View File

@ -1,6 +1,4 @@
import { RmOptions } from 'fs';
const fs = require('fs');
import { existsSync, lstatSync, mkdirSync, readdirSync, RmOptions, rmSync } from 'fs';
export default class File
{
@ -14,24 +12,24 @@ export default class File
public exists(): boolean
{
return fs.existsSync(this._path);
return existsSync(this._path);
}
public mkdirs(): void
{
return fs.mkdirSync(this._path);
return mkdirSync(this._path);
}
public list(): string[]
{
const test = fs.readdirSync(this._path);
const test = readdirSync(this._path);
return test;
}
public isDirectory(): boolean
{
return this.exists() && fs.lstatSync(this._path).isDirectory();
return this.exists() && lstatSync(this._path).isDirectory();
}
get path(): string
@ -41,6 +39,6 @@ export default class File
public rmdir(options: RmOptions): void
{
return fs.rmSync(this._path, options);
return rmSync(this._path, options);
}
}

View File

@ -29,7 +29,7 @@ export class FileUtilities
return content;
}
catch(error)
catch (error)
{
console.log();
console.error(error);
@ -60,7 +60,7 @@ export class FileUtilities
return content;
}
catch(error)
catch (error)
{
console.log();
console.error(error);

View File

@ -1,23 +1,22 @@
import { createWriteStream, existsSync } from 'fs';
import { appendFile } from 'fs/promises';
import { singleton } from 'tsyringe';
const fs = require('fs');
const fsAsync = require('fs/promises');
@singleton()
export default class Logger
{
constructor()
{
if(!fs.existsSync('error.log'))
if(!existsSync('error.log'))
{
const createStream = fs.createWriteStream('error.log');
const createStream = createWriteStream('error.log');
createStream.end();
}
}
public logErrorAsync(message: string): Promise<void>
{
return fsAsync.appendFile('error.log', message + '\n');
return appendFile('error.log', message + '\n');
}
}
}

View File

@ -1,7 +1,7 @@
const ByteBuffer = require('bytebuffer');
const { gzip } = require('node-gzip');
import * as ByteBuffer from 'bytebuffer';
import { gzip } from 'zlib';
export default class NitroBundle
export class NitroBundle
{
private readonly _files: Map<string, Buffer>;
@ -19,7 +19,7 @@ export default class NitroBundle
{
const buffer = new ByteBuffer();
buffer.writeUInt16(this._files.size);
buffer.writeUint16(this._files.size);
const iterator = this._files.entries();
let result: IteratorResult<[string, Buffer]> = iterator.next();
@ -31,7 +31,7 @@ export default class NitroBundle
buffer.writeUint16(fileName.length);
buffer.writeString(fileName);
const compressed = await gzip(file);
const compressed = await gzip.__promisify__(file);
buffer.writeUint32(compressed.length);
buffer.append(compressed);
@ -40,4 +40,4 @@ export default class NitroBundle
return buffer.flip().toBuffer();
}
}
}
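The hunk above replaces node-gzip's promise-returning gzip with Node's callback-based zlib.gzip and then awaits gzip.__promisify__(file). In Node's type declarations __promisify__ is a type-level marker only, so it does not exist at runtime, which is exactly what the committed error.log at the top of this diff reports. A sketch of a working replacement using util.promisify (same names as the hunk; the surrounding method is outside the hunk):

import { gzip } from 'zlib';
import { promisify } from 'util';

// promisify(gzip) yields (buffer) => Promise<Buffer>
const gzipAsync = promisify(gzip);

// inside the bundling method shown above:
// const compressed = await gzipAsync(file);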

View File

@ -5,7 +5,7 @@ export class SlicedToArray
if(Array.isArray(arr)) return arr;
if(Symbol.iterator in Object(arr)) return SlicedToArray.sliceIterator(arr, i);
throw new TypeError('Invalid attempt to destructure non-iterable instance');
}

View File

@ -20,53 +20,75 @@ const _decoder = require('jpg-stream/decoder');
const _decoder2 = _interopRequireDefault(_decoder);
function _interopRequireDefault(obj: any) {
return obj && obj.__esModule ? obj : {default: obj};
function _interopRequireDefault(obj: any)
{
return obj && obj.__esModule ? obj : { default: obj };
}
const _concatFrames = require('concat-frames');
const _concatFrames2 = _interopRequireDefault(_concatFrames);
const _slicedToArray = function () {
function sliceIterator(arr: any, i: any) {
var _arr = [];
var _n = true;
var _d = false;
var _e = undefined;
try {
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
const _slicedToArray = function ()
{
function sliceIterator(arr: any, i: any)
{
const _arr = [];
let _n = true;
let _d = false;
let _e = undefined;
try
{
for(var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true)
{
_arr.push(_s.value);
if (i && _arr.length === i) break;
if(i && _arr.length === i) break;
}
} catch (err) {
}
catch (err)
{
_d = true;
_e = err;
} finally {
try {
if (!_n && _i["return"]) _i["return"]();
} finally {
if (_d) throw _e;
}
finally
{
try
{
if(!_n && _i['return']) _i['return']();
}
finally
{
if(_d) throw _e;
}
}
return _arr;
}
return function (arr: any, i: any) {
if (Array.isArray(arr)) {
return function (arr: any, i: any)
{
if(Array.isArray(arr))
{
return arr;
} else if (Symbol.iterator in Object(arr)) {
}
else if(Symbol.iterator in Object(arr))
{
return sliceIterator(arr, i);
} else {
throw new TypeError("Invalid attempt to destructure non-iterable instance");
}
else
{
throw new TypeError('Invalid attempt to destructure non-iterable instance');
}
};
}();
export function readSwfAsync(data: string | Buffer): Promise<any> {
return new Promise<any>(((resolve, reject) => {
SWFReader.read(data, function (err: Error, swf: any) {
if (err) {
export function readSwfAsync(data: string | Buffer): Promise<any>
{
return new Promise<any>(((resolve, reject) =>
{
SWFReader.read(data, function (err: Error, swf: any)
{
if(err)
{
reject(err);
}
resolve(swf);
@ -76,18 +98,21 @@ export function readSwfAsync(data: string | Buffer): Promise<any> {
const pngMagic = Buffer.from('0x89 0x50 0x4E 0x47 0x0D 0x0A 0x1A 0x0A'.split(' ').map(Number));
const gifMagic = Buffer.from('0x47 0x49 0x46 0x38 0x39 0x61'.split(' ').map(Number));
const recognizeHeader = function recognizeHeader(buffer: Buffer) {
if (pngMagic.equals(buffer.slice(0, pngMagic.length))) return 'png';
if (gifMagic.equals(buffer.slice(0, gifMagic.length))) return 'gif';
const recognizeHeader = function recognizeHeader(buffer: Buffer)
{
if(pngMagic.equals(buffer.slice(0, pngMagic.length))) return 'png';
if(gifMagic.equals(buffer.slice(0, gifMagic.length))) return 'gif';
return 'jpeg';
};
export async function readImagesJPEG(code: number, tagData: any): Promise<any> {
var characterId = tagData.characterId,
export async function readImagesJPEG(code: number, tagData: any): Promise<any>
{
const characterId = tagData.characterId,
imageData = tagData.imageData;
var imgType = recognizeHeader(imageData);
if (imgType !== 'jpeg') {
const imgType = recognizeHeader(imageData);
if(imgType !== 'jpeg')
{
return {
code: code,
characterId: characterId,
@ -96,14 +121,17 @@ export async function readImagesJPEG(code: number, tagData: any): Promise<any> {
};
}
var bitmapAlphaData = tagData.bitmapAlphaData;
const bitmapAlphaData = tagData.bitmapAlphaData;
return new Promise(function (resolve, reject) {
var enc = new _encoder2.default(undefined, undefined, {colorSpace: 'rgba'});
_zlib2.default.unzip(bitmapAlphaData, function (err: any, alphaBufPre: any) {
return new Promise(function (resolve, reject)
{
const enc = new _encoder2.default(undefined, undefined, { colorSpace: 'rgba' });
_zlib2.default.unzip(bitmapAlphaData, function (err: any, alphaBufPre: any)
{
// INVARIANT: alphaBuf is either null or a non-empty buffer
let alphaBuf: any = null;
if (err) {
if(err)
{
/*
Due to a bug present in node zlib (https://github.com/nodejs/node/issues/17041)
unzipping an empty buffer can raise "unexpected end of file" error.
@ -113,33 +141,41 @@ export async function readImagesJPEG(code: number, tagData: any): Promise<any> {
other two zlib.unzip call happens at sites that an empty uncompressed Buffer
does not make sense. So I think the current fix is good enough.
*/
if (bitmapAlphaData.length > 0) {
if(bitmapAlphaData.length > 0)
{
return reject(new Error(err));
}
// leaving alphaBuf as null
} else {
// ensure alphaBuf is only assigned an non-empty Buffer
if (alphaBufPre.length > 0) alphaBuf = alphaBufPre;
}
var bufferStream = new _stream2.default.PassThrough();
else
{
// ensure alphaBuf is only assigned an non-empty Buffer
if(alphaBufPre.length > 0) alphaBuf = alphaBufPre;
}
const bufferStream = new _stream2.default.PassThrough();
bufferStream.end(imageData);
bufferStream.pipe(new _decoder2.default()).pipe((_concatFrames2.default)(function (_ref: any) {
var _ref2 = _slicedToArray(_ref, 1),
bufferStream.pipe(new _decoder2.default()).pipe((_concatFrames2.default)(function (_ref: any)
{
const _ref2 = _slicedToArray(_ref, 1),
frame = _ref2[0];
var input = frame.pixels;
var pCount = frame.width * frame.height;
var output = Buffer.alloc(pCount * 4);
if (alphaBuf !== null && alphaBuf.length !== pCount) {
const input = frame.pixels;
const pCount = frame.width * frame.height;
const output = Buffer.alloc(pCount * 4);
if(alphaBuf !== null && alphaBuf.length !== pCount)
{
console.error('expect alphaBuf to have size ' + pCount + ' while getting ' + alphaBuf.length);
}
var getAlphaBuf = alphaBuf === null ? function (_ignored: any) {
const getAlphaBuf = alphaBuf === null ? function (_ignored: any)
{
return 0xff;
} : function (i: any) {
} : function (i: any)
{
return alphaBuf[i];
};
for (var i = 0; i < pCount; ++i) {
for(let i = 0; i < pCount; ++i)
{
output[4 * i] = input[3 * i];
output[4 * i + 1] = input[3 * i + 1];
output[4 * i + 2] = input[3 * i + 2];
@ -150,8 +186,10 @@ export async function readImagesJPEG(code: number, tagData: any): Promise<any> {
enc.end(output);
}));
});
(_streamToArray2.default)(enc).then(function (parts: any) {
var buffers = parts.map(function (part: any) {
(_streamToArray2.default)(enc).then(function (parts: any)
{
const buffers = parts.map(function (part: any)
{
return Buffer.isBuffer(part) ? part : Buffer.from(part);
});
resolve({
@ -173,7 +211,8 @@ export interface ImageTagData {
bitmapHeight: number
}
export function readImagesDefineBitsLossless(tag: any) {
export function readImagesDefineBitsLossless(tag: any)
{
const characterId = tag.characterId,
bitmapFormat = tag.bitmapFormat,
bitmapWidth = tag.bitmapWidth,
@ -182,21 +221,27 @@ export function readImagesDefineBitsLossless(tag: any) {
zlibBitmapData = tag.zlibBitmapData;
return new Promise(function (resolve, reject) {
const enc = new _encoder2.default(bitmapWidth, bitmapHeight, {colorSpace: 'rgba'});
return new Promise(function (resolve, reject)
{
const enc = new _encoder2.default(bitmapWidth, bitmapHeight, { colorSpace: 'rgba' });
_zlib2.default.unzip(zlibBitmapData, function (err: any, dataBuf: any) {
if (err) {
_zlib2.default.unzip(zlibBitmapData, function (err: any, dataBuf: any)
{
if(err)
{
return reject(new Error(err));
}
var output = Buffer.alloc(bitmapWidth * bitmapHeight * 4);
var index = 0;
var ptr = 0;
if (bitmapFormat === 5) {
const output = Buffer.alloc(bitmapWidth * bitmapHeight * 4);
let index = 0;
let ptr = 0;
if(bitmapFormat === 5)
{
// 32-bit ARGB image
for (var y = 0; y < bitmapHeight; ++y) {
for (var x = 0; x < bitmapWidth; ++x) {
var alpha = dataBuf[ptr];
for(let y = 0; y < bitmapHeight; ++y)
{
for(let x = 0; x < bitmapWidth; ++x)
{
const alpha = dataBuf[ptr];
output[index] = dataBuf[ptr + 1] * (255 / alpha);
output[index + 1] = dataBuf[ptr + 2] * (255 / alpha);
output[index + 2] = dataBuf[ptr + 3] * (255 / alpha);
@ -205,17 +250,22 @@ export function readImagesDefineBitsLossless(tag: any) {
ptr += 4;
}
}
} else if (bitmapFormat === 3) {
}
else if(bitmapFormat === 3)
{
// 8-bit colormapped image
var colorMap = [];
for (var i = 0; i < bitmapColorTableSize + 1; ++i) {
const colorMap = [];
for(let i = 0; i < bitmapColorTableSize + 1; ++i)
{
colorMap.push([dataBuf[ptr], dataBuf[ptr + 1], dataBuf[ptr + 2], dataBuf[ptr + 3]]);
ptr += 4;
}
for (var _y2 = 0; _y2 < bitmapHeight; ++_y2) {
for (var _x2 = 0; _x2 < bitmapWidth; ++_x2) {
var idx = dataBuf[ptr];
var color = idx < colorMap.length ? colorMap[idx] : [0, 0, 0, 0];
for(let _y2 = 0; _y2 < bitmapHeight; ++_y2)
{
for(let _x2 = 0; _x2 < bitmapWidth; ++_x2)
{
const idx = dataBuf[ptr];
const color = idx < colorMap.length ? colorMap[idx] : [0, 0, 0, 0];
output[index] = color[0];
output[index + 1] = color[1];
output[index + 2] = color[2];
@ -226,14 +276,18 @@ export function readImagesDefineBitsLossless(tag: any) {
// skip padding
ptr += (4 - bitmapWidth % 4) % 4;
}
} else {
}
else
{
return reject(new Error('unhandled bitmapFormat: ' + bitmapFormat));
}
enc.end(output);
});
(_streamToArray2.default)(enc).then(function (parts: any) {
var buffers = parts.map(function (part: any) {
(_streamToArray2.default)(enc).then(function (parts: any)
{
const buffers = parts.map(function (part: any)
{
return Buffer.isBuffer(part) ? part : Buffer.from(part);
});
resolve({
@ -245,7 +299,8 @@ export function readImagesDefineBitsLossless(tag: any) {
bitmapHeight: bitmapHeight
});
});
}).catch(function (e) {
}).catch(function (e)
{
console.error(e);
});
}