Refactor to make code more TypeScript compatible (#183)
This change contains several refactors, mostly to make the code more
compatible with TypeScript.

All `require()` calls were changed to the form
`const identifier = require('specifier')`. This is compatible with
TypeScript’s `verbatimModuleSyntax` option, which is strongly
recommended in order to produce valid library types.
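
As an illustration grounded in the diff below (see `src/tus.js`), the shape of the change is roughly the following; the example path is made up:

```js
// Before: destructuring or chaining straight off require()
// const { basename } = require('path')
// const log = require('debug')('transloadit')

// After: one identifier per module specifier, members accessed from it
const nodePath = require('path')
const debug = require('debug')

const log = debug('transloadit')
log('file name: %s', nodePath.basename('/tmp/example.jpg'))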

All uses of `lodash` were changed to native JavaScript implementations.
The `lodash` dependency was removed.
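
A minimal sketch of the replacements used in this diff (`fromPairs`, `keyBy`, and `sumBy` were the remaining lodash helpers); the sample data here is illustrative only:

```js
// Sample data for illustration only
const items = [{ name: 'file1' }, { name: 'file2' }]
const progresses = { file1: 10, file2: 32 }

// lodash fromPairs / keyBy -> Object.fromEntries over an array of [key, value] pairs
const byName = Object.fromEntries(items.map((item) => [item.name, item]))

// lodash sumBy -> a plain loop (a reduce() would work equally well)
let total = 0
for (const name of Object.keys(progresses)) {
  total += progresses[name]
}

console.log(byName.file1, total) // { name: 'file1' } 42
```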

Static class assignments were changed to static class property syntax.
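
Concretely, the error classes exposed on `TransloaditClient` move from assignments after the class body into the class body itself; a shortened sketch of the pattern:

```js
const got = require('got')

// Before: static members assigned after the class body
// class TransloaditClient {}
// TransloaditClient.RequestError = got.RequestError

// After: static class property syntax inside the class body
class TransloaditClient {
  static RequestError = got.RequestError
}

console.log(TransloaditClient.RequestError === got.RequestError) // true
```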

`util.promisify` usage was replaced with Node.js promise APIs.
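
In other words, hand-rolled `promisify()` wrappers give way to the promise-based modules Node.js ships (`fs/promises`, `stream/promises`, `dns/promises`). A rough sketch, with an illustrative output path:

```js
const fs = require('fs')
const fsPromises = require('fs/promises')
const { pipeline } = require('stream/promises')

async function example() {
  // Was: a promisify-wrapped fs.stat
  const { size } = await fsPromises.stat(__filename)

  // Was: promisify(require('stream').pipeline)
  await pipeline(
    fs.createReadStream(__filename),
    fs.createWriteStream('/tmp/copy.js') // made-up destination for illustration
  )

  console.log('copied', size, 'bytes')
}

example()
```
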
remcohaszing authored Aug 29, 2024
1 parent 48bfa46 commit b581334
Showing 10 changed files with 87 additions and 88 deletions.
1 change: 0 additions & 1 deletion package.json
@@ -21,7 +21,6 @@
"got": "^11.8.1",
"into-stream": "^6.0.0",
"is-stream": "^2.0.0",
"lodash": "^4.17.20",
"p-map": "^4.0.0",
"tus-js-client": "^3.0.1",
"uuid": "^8.3.2"
57 changes: 33 additions & 24 deletions src/Transloadit.js
@@ -1,11 +1,9 @@
const got = require('got')
const FormData = require('form-data')
const crypto = require('crypto')
const fromPairs = require('lodash/fromPairs')
const fs = require('fs')
const { access } = require('fs').promises
const log = require('debug')('transloadit')
const logWarn = require('debug')('transloadit:warn')
const fsPromises = require('fs/promises')
const debug = require('debug')
const intoStream = require('into-stream')
const isStream = require('is-stream')
const assert = require('assert')
@@ -14,9 +12,12 @@ const uuid = require('uuid')

const InconsistentResponseError = require('./InconsistentResponseError')
const PaginationStream = require('./PaginationStream')
const { version } = require('../package.json')
const pkg = require('../package.json')
const tus = require('./tus')

const log = debug('transloadit')
const logWarn = debug('transloadit:warn')

function decorateHttpError(err, body) {
if (!body) return err

@@ -70,6 +71,24 @@ function checkResult(result) {
}

class TransloaditClient {
// See https://github.com/sindresorhus/got#errors
// Expose relevant errors
static RequestError = got.RequestError

static ReadError = got.ReadError

static ParseError = got.ParseError

static UploadError = got.UploadError

static HTTPError = got.HTTPError

static MaxRedirectsError = got.MaxRedirectsError

static TimeoutError = got.TimeoutError

static InconsistentResponseError = InconsistentResponseError

constructor(opts = {}) {
if (opts.authKey == null) {
throw new Error('Please provide an authKey')
@@ -156,13 +175,15 @@ class TransloaditClient {
const promise = (async () => {
this._lastUsedAssemblyUrl = `${this._endpoint}${urlSuffix}`

// eslint-disable-next-line no-bitwise
await pMap(Object.entries(files), async ([, path]) => access(path, fs.F_OK | fs.R_OK), {
concurrency: 5,
})
await pMap(
Object.entries(files),
// eslint-disable-next-line no-bitwise
async ([, path]) => fsPromises.access(path, fs.constants.F_OK | fs.constants.R_OK),
{ concurrency: 5 }
)

// Convert uploads to streams
const streamsMap = fromPairs(
const streamsMap = Object.fromEntries(
Object.entries(uploads).map(([label, value]) => {
const isReadable = isStream.readable(value)
if (!isReadable && isStream(value)) {
@@ -175,7 +196,7 @@
)

// Wrap in object structure (so we can know if it's a pathless stream or not)
const allStreamsMap = fromPairs(
const allStreamsMap = Object.fromEntries(
Object.entries(streamsMap).map(([label, stream]) => [label, { stream }])
)

@@ -679,7 +700,7 @@
body: form,
timeout,
headers: {
'Transloadit-Client': `node-sdk:${version}`,
'Transloadit-Client': `node-sdk:${pkg.version}`,
'User-Agent': undefined, // Remove got's user-agent
...headers,
},
@@ -726,16 +747,4 @@
}
}

// See https://github.com/sindresorhus/got#errors
// Expose relevant errors
TransloaditClient.RequestError = got.RequestError
TransloaditClient.ReadError = got.ReadError
TransloaditClient.ParseError = got.ParseError
TransloaditClient.UploadError = got.UploadError
TransloaditClient.HTTPError = got.HTTPError
TransloaditClient.MaxRedirectsError = got.MaxRedirectsError
TransloaditClient.TimeoutError = got.TimeoutError

TransloaditClient.InconsistentResponseError = InconsistentResponseError

module.exports = TransloaditClient
18 changes: 11 additions & 7 deletions src/tus.js
@@ -1,10 +1,11 @@
const log = require('debug')('transloadit')
const sumBy = require('lodash/sumBy')
const { basename } = require('path')
const debug = require('debug')
const nodePath = require('path')
const tus = require('tus-js-client')
const { stat: fsStat } = require('fs').promises
const fsPromises = require('fs/promises')
const pMap = require('p-map')

const log = debug('transloadit')

async function sendTusRequest({
streamsMap,
assembly,
@@ -28,7 +29,7 @@ async function sendTusRequest({
const { path } = streamsMap[label]

if (path) {
const { size } = await fsStat(path)
const { size } = await fsPromises.stat(path)
sizes[label] = size
totalBytes += size
}
@@ -58,7 +59,10 @@
uploadProgresses[label] = bytesUploaded

// get all uploaded bytes for all files
const uploadedBytes = sumBy(streamLabels, (l) => uploadProgresses[l])
let uploadedBytes = 0
for (const l of streamLabels) {
uploadedBytes += uploadProgresses[l]
}

// don't send redundant progress
if (lastEmittedProgress < uploadedBytes) {
@@ -72,7 +76,7 @@
}
}

const filename = path ? basename(path) : label
const filename = path ? nodePath.basename(path) : label

await new Promise((resolve, reject) => {
const tusOptions = {
2 changes: 1 addition & 1 deletion test/generate-coverage-badge.js
@@ -1,6 +1,6 @@
#!/usr/bin/env node

const fs = require('fs').promises
const fs = require('fs/promises')
const { makeBadge } = require('badge-maker')

// eslint-disable-next-line import/newline-after-import
26 changes: 12 additions & 14 deletions test/integration/live-api.test.js
@@ -1,25 +1,22 @@
// https://github.com/axios/axios/issues/2654
const keyBy = require('lodash/keyBy')
const querystring = require('querystring')
const temp = require('temp')
const { createWriteStream } = require('fs')
const { join } = require('path')
const { promisify } = require('util')
const { pipeline: streamPipeline } = require('stream')
const fs = require('fs')
const nodePath = require('path')
const nodeStream = require('stream/promises')
const got = require('got')
const intoStream = require('into-stream')
const uuid = require('uuid')
const debug = require('debug')('transloadit:live-api')
const debug = require('debug')

const pipeline = promisify(streamPipeline)
const log = debug('transloadit:live-api')

const Transloadit = require('../../src/Transloadit')

const { createTestServer } = require('../testserver')

async function downloadTmpFile(url) {
const { path } = await temp.open('transloadit')
await pipeline(got.stream(url), createWriteStream(path))
await nodeStream.pipeline(got.stream(url), fs.createWriteStream(path))
return path
}

@@ -119,10 +116,10 @@ beforeAll(async () => {
if (req.url === '') req.url = '/'
handler(req, res)
} else {
debug('request handler for UUID not found', id)
log('request handler for UUID not found', id)
}
} else {
debug('Invalid path match', req.url)
log('Invalid path match', req.url)
}
})
}, 100000)
@@ -133,7 +130,7 @@ afterAll(async () => {

async function createVirtualTestServer(handler) {
const id = uuid.v4()
debug('Adding virtual server handler', id)
log('Adding virtual server handler', id)
const url = `${testServer.url}/${id}`
handlers.set(id, handler)

@@ -275,7 +272,8 @@ describe('API integration', { timeout: 30000 }, () => {
original_path: '/',
original_md5hash: '1b199e02dd833b2278ce2a0e75480b14',
})
const uploadsKeyed = keyBy(result.uploads, 'name') // Because order is not same as input
// Because order is not same as input
const uploadsKeyed = Object.fromEntries(result.uploads.map((upload) => [upload.name, upload]))
expect(uploadsKeyed.file1).toMatchObject(getMatchObject({ name: 'file1' }))

expect(uploadsKeyed.file2).toMatchObject(getMatchObject({ name: 'file2' }))
expect(uploadsKeyed.file3).toMatchObject(getMatchObject({ name: 'file3' }))
@@ -390,7 +388,7 @@ describe('API integration', { timeout: 30000 }, () => {
},
},
files: {
file: join(__dirname, './fixtures/zerobytes.jpg'),
file: nodePath.join(__dirname, './fixtures/zerobytes.jpg'),
},
waitForCompletion: true,
}
18 changes: 10 additions & 8 deletions test/testserver.js
@@ -1,6 +1,8 @@
const http = require('http')
const got = require('got')
const debug = require('debug')('transloadit:testserver')
const debug = require('debug')

const log = debug('transloadit:testserver')

const createTunnel = require('./tunnel')

@@ -13,7 +15,7 @@ async function createHttpServer(handler) {
// Find a free port to use
function tryListen() {
server.listen(port, '127.0.0.1', () => {
debug(`server listening on port ${port}`)
log(`server listening on port ${port}`)
resolve({ server, port })
})
}
@@ -45,7 +47,7 @@ async function createTestServer(onRequest) {
let tunnel

const handleHttpRequest = (req, res) => {
debug('HTTP request handler', req.method, req.url)
log('HTTP request handler', req.method, req.url)

if (!initialized) {
if (req.url !== expectedPath) throw new Error(`Unexpected path ${req.url}`)
@@ -62,17 +64,17 @@ async function createTestServer(onRequest) {
async function close() {
if (tunnel) await tunnel.close()
await new Promise((resolve) => server.close(() => resolve()))
debug('closed tunnel')
log('closed tunnel')
}

try {
tunnel = createTunnel({ cloudFlaredPath: process.env.CLOUDFLARED_PATH, port })

debug('waiting for tunnel to be created')
log('waiting for tunnel to be created')
const tunnelPublicUrl = await tunnel.urlPromise
debug('tunnel created', tunnelPublicUrl)
log('tunnel created', tunnelPublicUrl)

debug('Waiting for tunnel to allow requests to pass through')
log('Waiting for tunnel to allow requests to pass through')

// eslint-disable-next-line no-inner-declarations
async function sendTunnelRequest() {
@@ -100,7 +102,7 @@ async function createTestServer(onRequest) {
sendTunnelRequest(),
])

debug('Tunnel ready', tunnelPublicUrl)
log('Tunnel ready', tunnelPublicUrl)

return {
port,
24 changes: 13 additions & 11 deletions test/tunnel.js
@@ -1,10 +1,11 @@
const execa = require('execa')
const readline = require('readline')
const { Resolver } = require('dns')
const { promisify } = require('util')
const debug = require('debug')('transloadit:cloudflared-tunnel')
const dns = require('dns/promises')
const debug = require('debug')
const pRetry = require('p-retry')

const log = debug('transloadit:cloudflared-tunnel')

async function startTunnel({ cloudFlaredPath, port }) {
const process = execa(
cloudFlaredPath,
@@ -39,7 +40,7 @@
]

rl.on('line', (line) => {
debug(line)
log(line)
fullStderr += `${line}\n`

if (
@@ -72,31 +73,30 @@
}
}

module.exports = ({ cloudFlaredPath = 'cloudflared', port }) => {
function createTunnel({ cloudFlaredPath = 'cloudflared', port }) {
let process

const urlPromise = (async () => {
const tunnel = await pRetry(async () => startTunnel({ cloudFlaredPath, port }), { retries: 1 })
;({ process } = tunnel)
const { url } = tunnel

debug('Found url', url)
log('Found url', url)

// We need to wait for DNS to be resolvable.
// If we don't, the operating system's dns cache will be poisoned by the not yet valid resolved entry
// and it will forever fail for that subdomain name...
const resolver = new Resolver()
const resolver = new dns.Resolver()
resolver.setServers(['1.1.1.1']) // use cloudflare's dns server. if we don't explicitly specify DNS server, it will also poison our OS' dns cache
const resolve4 = promisify(resolver.resolve4.bind(resolver))

for (let i = 0; i < 10; i += 1) {
try {
const host = new URL(url).hostname
debug('checking dns', host)
await resolve4(host)
log('checking dns', host)
await resolver.resolve4(host)
return url
} catch (err) {
debug('dns err', err.message)
log('dns err', err.message)
await new Promise((resolve) => setTimeout(resolve, 3000))
}
}
@@ -117,3 +117,5 @@ module.exports = ({ cloudFlaredPath = 'cloudflared', port }) => {
close,
}
}

module.exports = createTunnel
