Skip to content

Commit

Permalink
Update https-resolver.js
Browse files Browse the repository at this point in the history
Improve the Parcel HTTPS resolver: use `const` throughout, extract named constants for retries and the fetch timeout, clean up naming, and simplify the write-verification check.
  • Loading branch information
Yuva authored Dec 20, 2024
1 parent caf7456 commit e85f8db
Showing 1 changed file with 84 additions and 82 deletions.
166 changes: 84 additions & 82 deletions frontend-bundler/parcel-resolver-like-a-browser/https-resolver.js
Original file line number Diff line number Diff line change
@@ -1,124 +1,126 @@
let { Resolver } = require("@parcel/plugin")
let path = require("path")
let fs = require("fs/promises")
let { mkdirp } = require("mkdirp")
let { URL } = require("url")
let crypto = require("crypto")

// Sentinel returned to tell Parcel to exclude a dependency from the bundle.
const DONT_INCLUDE = { isExcluded: true };
const MAX_RETRIES = 10;
const FETCH_TIMEOUT = 10000; // Abort a fetch that takes longer than this (ms)

// True when `filePath` can be stat-ed; false on any stat error (ENOENT, EACCES, ...).
const fileExists = async (filePath) => !!(await fs.stat(filePath).catch(() => false));

/**
 * Run `fn`, retrying on failure.
 *
 * Makes up to `maxRetries + 1` attempts in total (matching the original
 * recursive version, where `maxRetries === 0` still ran `fn` once).
 * Rethrows the last error when every attempt fails.
 *
 * @param {() => Promise<any>} fn - async operation to attempt
 * @param {number} [maxRetries] - number of retries after the first attempt
 * @returns {Promise<any>} the first successful result of `fn`
 */
async function keepTrying(fn, maxRetries = MAX_RETRIES) {
  let lastError;
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
    }
  }
  throw lastError;
}

// Main resolver logic
module.exports = new Resolver({
async resolve({ specifier, dependency, options }) {
let my_temp_cave = path.join(options.cacheDir, ".net")
const tempCacheDir = path.join(options.cacheDir, ".net");

// Wait before resolving to handle any asynchronous setup if needed
await new Promise((resolve) => setTimeout(resolve, FETCH_TIMEOUT));

await new Promise((resolve) => setTimeout(resolve, 10000))
// Handle CommonJS specifiers
if (dependency.specifierType === "commonjs") {
if (specifier === "process") {
return { filePath: "/dev/null.js", code: "" }
return { filePath: "/dev/null.js", code: "" };
}
if (specifier.startsWith("@parcel") || dependency.sourcePath.includes("node_modules/@parcel")) {
return null
return null;
}
console.error(`Unrecognized commonjs import:`, dependency)
return DONT_INCLUDE
console.error(`Unrecognized CommonJS import:`, dependency);
return DONT_INCLUDE;
}

// So yeah, our sample urls aren't real urls....
// Exclude unsupported "sample" imports
if (specifier.startsWith("sample")) {
return DONT_INCLUDE
return DONT_INCLUDE;
}

// Translate my cool directory structure into a real url
if (dependency.sourcePath?.startsWith?.(my_temp_cave)) {
let [protocol, hostname, ...path] = dependency.sourcePath.slice(my_temp_cave.length).slice(1).split("/")
let url_object = new URL(specifier, `${protocol}://${hostname}/${path.join("/")}`)
specifier = url_object.toString()
// Convert custom directory structures into valid URLs
if (dependency.sourcePath?.startsWith?.(tempCacheDir)) {
const [protocol, hostname, ...urlPath] = dependency.sourcePath
.slice(tempCacheDir.length)
.slice(1)
.split("/");
const urlObject = new URL(specifier, `${protocol}://${hostname}/${urlPath.join("/")}`);
specifier = urlObject.toString();
}

// Handle HTTP/HTTPS URLs
if (specifier.startsWith("https://") || specifier.startsWith("http://")) {
let url = new URL(specifier)

if (url.port !== "") throw new Error(`Port in urls not supported yet (${specifier})`)
if (url.hash !== "") throw new Error(`Hash in urls not supported yet (${specifier})`)
if (url.username !== "") throw new Error(`Username in urls not supported (${specifier})`)
if (url.password !== "") throw new Error(`Password in urls not supported (${specifier})`)

// If no file extension is given in the URL, guess one automatically.
let found_extension = /\.[a-zA-Z][a-zA-Z0-9]+$/.exec(url.pathname)?.[0]

let extension_to_add = found_extension ?? (dependency.specifierType === "esm" ? ".mjs" : "")

let search_component = ""
if (url.search !== "") {
search_component = "." + crypto.createHmac("sha256", "42").update(url.search).digest("hex").slice(0, 10)
}

// If a search is given in the URL, this will search be appended to the path, so we need to repeat the extension.
let should_add_extension = search_component !== "" || found_extension == null
let suffix = should_add_extension ? extension_to_add : ""

// Create a folder structure and file for the import. This folder structure will match the URL structure, to make sure that relative imports still work.
let filename_parts = (url.pathname.slice(1) + search_component + suffix).split("/")
let url_to_path = path.join(url.protocol.slice(0, -1), url.hostname, ...filename_parts)
let fullpath = path.join(my_temp_cave, url_to_path)
let folder = path.dirname(fullpath)

if (!(await fileExists(fullpath))) {
await keep_trying(async () => {
let response = await fetch(specifier)
const url = new URL(specifier);

// Validate URL
if (url.port) throw new Error(`Port in URLs not supported yet (${specifier})`);
if (url.hash) throw new Error(`Hash in URLs not supported yet (${specifier})`);
if (url.username || url.password) throw new Error(`Authentication in URLs not supported (${specifier})`);

// Guess or append file extension
const extensionMatch = /\.[a-zA-Z][a-zA-Z0-9]+$/.exec(url.pathname)?.[0];
const defaultExtension = dependency.specifierType === "esm" ? ".mjs" : "";
const extension = extensionMatch || defaultExtension;

const searchHash =
url.search !== ""
? "." + crypto.createHmac("sha256", "42").update(url.search).digest("hex").slice(0, 10)
: "";
const filePathParts = (url.pathname.slice(1) + searchHash + extension).split("/");
const resolvedPath = path.join(tempCacheDir, url.protocol.slice(0, -1), url.hostname, ...filePathParts);
const directory = path.dirname(resolvedPath);

// Fetch and cache the file if it doesn't already exist
if (!(await fileExists(resolvedPath))) {
await keepTrying(async () => {
const response = await fetch(specifier);
if (response.status !== 200) {
throw new Error(`${specifier} returned ${response.status}`)
throw new Error(`${specifier} returned ${response.status}`);
}
// Can't directly use the value from the request, as parcel really wants a string,
// and converting binary assets into strings and then passing them doesn't work 🤷‍♀️.
let buffer = await response.arrayBuffer()

const response_length = buffer.byteLength

if (response_length === 0) {
throw new Error(`${specifier} returned an empty reponse.`)
const buffer = await response.arrayBuffer();
if (buffer.byteLength === 0) {
throw new Error(`${specifier} returned an empty response.`);
}

await mkdirp(folder)
const write_result = await fs.writeFile(fullpath, Buffer.from(buffer))
await mkdirp(directory);
await fs.writeFile(resolvedPath, Buffer.from(buffer));

// Verify that the file was written correctly:
if (write_result !== undefined || (await fs.readFile(fullpath)).length !== response_length) {
throw new Error(`Failed to write file ${fullpath}`)
// Verify written content
const writtenContent = await fs.readFile(resolvedPath);
if (writtenContent.length !== buffer.byteLength) {
throw new Error(`Failed to write file ${resolvedPath}`);
}
})
});
}

return { filePath: fullpath }
return { filePath: resolvedPath };
}

// Handle ESM and URL specifiers
if (dependency.specifierType === "esm" || dependency.specifierType === "url") {
return {
filePath: path.join(path.dirname(dependency.sourcePath ?? "/"), specifier),
}
};
}

console.error(`Dependency unrecognized:`, {
// Log unrecognized dependencies and exclude
console.error(`Unrecognized dependency:`, {
specifier: dependency.specifier,
specifierType: dependency.specifierType,
sourcePath: dependency.sourcePath,
})

// This shouldn't lead to an error, but this does make the bundle unusable
return DONT_INCLUDE
});
return DONT_INCLUDE;
},
})
});

0 comments on commit e85f8db

Please sign in to comment.