commit e3dccc3a3dbcfd0de32df3c50ca1e49f03c46ca1
Author: mc007
Date:   Mon Aug 9 11:42:01 2021 +0200

    init

diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 0000000..954d710
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,26 @@
+version: 2
+jobs:
+  build:
+    docker:
+      - image: circleci/node:latest
+
+    working_directory: ~/repo
+
+    steps:
+      - checkout
+
+      - restore_cache:
+          keys:
+          - v1-dependencies-{{ checksum "package.json" }}
+          # fallback to using the latest cache if no exact match is found
+          - v1-dependencies-
+
+      - run: yarn
+
+      - save_cache:
+          paths:
+            - node_modules
+          key: v1-dependencies-{{ checksum "package.json" }}
+
+      - run: yarn test
+
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..a2be815
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,37 @@
+root = true
+
+[*]
+indent_style = tab
+indent_size = 4
+tab_width = 4
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[{*.json,*.json.example,*.gyp,*.yml,*.yaml}]
+indent_style = space
+indent_size = 2
+
+[{*.py,*.asm}]
+indent_style = space
+
+[*.py]
+indent_size = 4
+
+[*.asm]
+indent_size = 8
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Ideal settings - some plugins might support these.
+[*.js]
+quote_type = single
+
+[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
+curly_bracket_next_line = false
+spaces_around_operators = true
+spaces_around_brackets = outside
+# close enough to 1TBS
+indent_brace_style = K&R
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..cc2eb5f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+node_modules
+*.log
+.nyc_output
+package-lock.json
diff --git a/.yarnrc b/.yarnrc
new file mode 100644
index 0000000..fdd705c
--- /dev/null
+++ b/.yarnrc
@@ -0,0 +1 @@
+save-prefix ""
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..35a1a21
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 ZEIT, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..4637967
--- /dev/null
+++ b/README.md
@@ -0,0 +1,87 @@
+![](https://assets.vercel.com/image/upload/v1527770721/repositories/serve/serve-repo-banner.png)
+
+
+
+
+
+[![Build Status](https://circleci.com/gh/vercel/serve.svg?&style=shield)](https://circleci.com/gh/vercel/serve)
+[![Install Size](https://packagephobia.now.sh/badge?p=serve)](https://packagephobia.now.sh/result?p=serve)
+
+If you want to serve a static site, a single-page application or just a static file (whether on your device or on the local network), this package is the right choice for you.
+
+Once it's time to push your site to production, we recommend using [Vercel](https://vercel.com).
+
+In addition, `serve` provides a neat interface for listing the directory's contents:
+
+![screenshot](https://user-images.githubusercontent.com/6170607/40541195-167ff460-601b-11e8-8f66-3b0c7ff96cbb.png)
+
+## Usage
+
+The quickest way to get started is to run `npx serve` inside your project's directory.
+
+If you prefer, you can also install the package globally using [Yarn](https://yarnpkg.com/en/) (you'll need at least [Node.js LTS](https://nodejs.org/en/)):
+
+```bash
+yarn global add serve
+```
+
+Once that's done, you can run this command inside your project's directory...
+
+```bash
+serve
+```
+
+...or specify which folder you want to serve:
+
+```bash
+serve folder_name
+```
+
+Finally, run this command to see a list of all available options:
+
+```bash
+serve --help
+```
+
+Now you understand how the package works! :tada:
+
+## Configuration
+
+To customize `serve`'s behavior, create a `serve.json` file in the public folder and insert any of [these properties](https://github.com/vercel/serve-handler#options).
+
+## API
+
+The core of `serve` is [serve-handler](https://github.com/vercel/serve-handler), which can be used as middleware in existing HTTP servers:
+
+```js
+const handler = require('serve-handler');
+const http = require('http');
+
+const server = http.createServer((request, response) => {
+  // You can pass two more arguments for config and middleware
+  // More details here: https://github.com/vercel/serve-handler#options
+  return handler(request, response);
+});
+
+server.listen(3000, () => {
+  console.log('Running at http://localhost:3000');
+});
+```
+
+**NOTE:** You can also replace `http.createServer` with [micro](https://github.com/vercel/micro) if you prefer.
+
+## Contributing
+
+1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device
+2. Uninstall `serve` if it's already installed: `npm uninstall -g serve`
+3. Link it to the global module directory: `npm link`
+
+After that, you can use the `serve` command everywhere. [Here](https://github.com/vercel/serve/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+for+beginners%22)'s a list of issues that are great for beginners.
+
+## Credits
+
+This project used to be called "list" and "micro-list". But thanks to [TJ Holowaychuk](https://github.com/tj) handing over the name, it's now called "serve" (which is a much better fit).
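As a concrete illustration of the Configuration section above, a minimal `serve.json` might look like the sketch below. The property names (`public`, `cleanUrls`, `rewrites`, `headers`) correspond to options read by `serve-handler` (see `src/server-handler.js` in this commit); the folder name, glob patterns and header values are assumptions for illustration only.

```json
{
  "public": "dist",
  "cleanUrls": true,
  "rewrites": [
    { "source": "app/**", "destination": "/index.html" }
  ],
  "headers": [
    {
      "source": "**/*.@(jpg|jpeg|gif|png)",
      "headers": [{ "key": "Cache-Control", "value": "max-age=7200" }]
    }
  ]
}
```

When the CLI runs, `loadConfig()` in `src/serve.js` picks this file up automatically from the served directory, falling back to the deprecated `now.json` / `package.json` locations.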
+
+## Author
+
+Leo Lamprecht ([@notquiteleo](https://twitter.com/notquiteleo)) - [Vercel](https://vercel.com)
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..b34f5a7
--- /dev/null
+++ b/package.json
@@ -0,0 +1,65 @@
+{
+  "name": "@plastichub/serve",
+  "version": "12.0.0",
+  "description": "Static file serving and directory listing",
+  "scripts": {
+    "test": "yarn lint",
+    "lint": "zeit-eslint --ext .jsx,.js .",
+    "lint-staged": "git diff --diff-filter=ACMRT --cached --name-only '*.js' '*.jsx' | xargs zeit-eslint",
+    "build-views": "dottojs -s ./src -d ./src"
+  },
+  "files": [
+    "bin"
+  ],
+  "repository": "vercel/serve",
+  "bin": {
+    "serve": "./bin/serve.js"
+  },
+  "keywords": [
+    "now",
+    "serve",
+    "micro",
+    "http-server"
+  ],
+  "author": "leo",
+  "license": "MIT",
+  "devDependencies": {
+    "@zeit/eslint-config-node": "0.3.0",
+    "ava": "^3.15.0",
+    "codecov": "3.7.1",
+    "commander": "2.15.1",
+    "dot": "1.1.3",
+    "eslint": "6.1.0",
+    "fs-extra": "6.0.1",
+    "micro": "9.3.2",
+    "node-fetch": "2.6.1",
+    "nyc": "14.1.1",
+    "request": "2.87.0",
+    "sleep-promise": "6.0.0",
+    "test-listen": "1.1.0"
+  },
+  "eslintConfig": {
+    "extends": [
+      "@zeit/eslint-config-node"
+    ]
+  },
+  "git": {},
+  "dependencies": {
+    "@zeit/schemas": "2.6.0",
+    "ajv": "6.12.6",
+    "arg": "2.0.0",
+    "boxen": "1.3.0",
+    "bytes": "3.0.0",
+    "chalk": "2.4.1",
+    "clipboardy": "2.3.0",
+    "compression": "1.7.3",
+    "content-disposition": "0.5.2",
+    "fast-url-parser": "1.1.3",
+    "mime-types": "2.1.18",
+    "minimatch": "3.0.4",
+    "path-is-inside": "1.0.2",
+    "path-to-regexp": "2.2.1",
+    "range-parser": "1.2.0",
+    "update-check": "1.5.2"
+  }
+}
diff --git a/src/directory.js b/src/directory.js
new file mode 100644
index 0000000..3c6c524
--- /dev/null
+++ b/src/directory.js
@@ -0,0 +1,16 @@
+(function(){function directory(it
+) {
+var encodeHTML = typeof _encodeHTML !== 'undefined' ? _encodeHTML : (function(doNotSkipEncoded) {
+	var encodeHTMLRules = { "&": "&#38;", "<": "&#60;", ">": "&#62;", '"': "&#34;", "'": "&#39;", "/": "&#47;" },
+		matchHTML = doNotSkipEncoded ? /[&<>"'\/]/g : /&(?!#?\w+;)|<|>|"|'|\//g;
+	return function(code) {
+		return code ? code.toString().replace(matchHTML, function(m) {return encodeHTMLRules[m] || m;}) : "";
+	};
+	}());var out=' Files within '+encodeHTML(it.directory)+'

Index of  ';var arr1=it.paths;if(arr1){var value,index=-1,l1=arr1.length-1;while(index'+encodeHTML(value.name)+' ';} } out+='

';return out; +}var itself=directory, _encodeHTML=(function(doNotSkipEncoded) { + var encodeHTMLRules = { "&": "&", "<": "<", ">": ">", '"': """, "'": "'", "/": "/" }, + matchHTML = doNotSkipEncoded ? /[&<>"'\/]/g : /&(?!#?\w+;)|<|>|"|'|\//g; + return function(code) { + return code ? code.toString().replace(matchHTML, function(m) {return encodeHTMLRules[m] || m;}) : ""; + }; + }());if(typeof module!=='undefined' && module.exports) module.exports=itself;else if(typeof define==='function')define(function(){return itself;});else {window.render=window.render||{};window.render['directory']=itself;}}()); \ No newline at end of file diff --git a/src/directory.jst b/src/directory.jst new file mode 100644 index 0000000..844207b --- /dev/null +++ b/src/directory.jst @@ -0,0 +1,159 @@ + + + + + + + Files within {{!it.directory}} + + + + + +
+
+

+ Index of  + + {{~it.paths :value:index}} + {{!value.name}} + {{~}} +

+
+ + +
+
+
diff --git a/src/error.js b/src/error.js
new file mode 100644
index 0000000..d8646be
--- /dev/null
+++ b/src/error.js
@@ -0,0 +1,10 @@
+(function(){function error(it
+) {
+var out='
'+(it.statusCode)+'

'+(it.message)+'

';return out; +}var itself=error, _encodeHTML=(function(doNotSkipEncoded) { + var encodeHTMLRules = { "&": "&", "<": "<", ">": ">", '"': """, "'": "'", "/": "/" }, + matchHTML = doNotSkipEncoded ? /[&<>"'\/]/g : /&(?!#?\w+;)|<|>|"|'|\//g; + return function(code) { + return code ? code.toString().replace(matchHTML, function(m) {return encodeHTMLRules[m] || m;}) : ""; + }; + }());if(typeof module!=='undefined' && module.exports) module.exports=itself;else if(typeof define==='function')define(function(){return itself;});else {window.render=window.render||{};window.render['error']=itself;}}()); \ No newline at end of file diff --git a/src/error.jst b/src/error.jst new file mode 100644 index 0000000..3f6aa84 --- /dev/null +++ b/src/error.jst @@ -0,0 +1,119 @@ + + + + + + + + + +
+
+ {{=it.statusCode}} +

{{=it.message}}

+
+
+ diff --git a/src/glob-slash.js b/src/glob-slash.js new file mode 100644 index 0000000..08187e4 --- /dev/null +++ b/src/glob-slash.js @@ -0,0 +1,9 @@ +/* ! The MIT License (MIT) Copyright (c) 2014 Scott Corgan */ + +// This is adopted from https://github.com/scottcorgan/glob-slash/ + +const path = require('path'); +const normalize = value => path.posix.normalize(path.posix.join('/', value)); + +module.exports = value => (value.charAt(0) === '!' ? `!${normalize(value.substr(1))}` : normalize(value)); +module.exports.normalize = normalize; diff --git a/src/serve.js b/src/serve.js new file mode 100644 index 0000000..2fbcd88 --- /dev/null +++ b/src/serve.js @@ -0,0 +1,462 @@ +#!/usr/bin/env node + +// Native +const http = require('http'); +const https = require('https'); +const path = require('path'); +const fs = require('fs'); +const {promisify} = require('util'); +const {parse} = require('url'); +const os = require('os'); + +// Packages +const Ajv = require('ajv'); +const checkForUpdate = require('update-check'); +const chalk = require('chalk'); +const arg = require('arg'); +const {write: copy} = require('clipboardy'); +const handler = require('./server-handler'); +const schema = require('@zeit/schemas/deployment/config-static'); +const boxen = require('boxen'); +const compression = require('compression'); + +// Utilities +const pkg = require('../package'); + +const readFile = promisify(fs.readFile); +const compressionHandler = promisify(compression()); + +const interfaces = os.networkInterfaces(); + +const warning = (message) => chalk`{yellow WARNING:} ${message}`; +const info = (message) => chalk`{magenta INFO:} ${message}`; +const error = (message) => chalk`{red ERROR:} ${message}`; + +const updateCheck = async (isDebugging) => { + let update = null; + + try { + update = await checkForUpdate(pkg); + } catch (err) { + const suffix = isDebugging ? ':' : ' (use `--debug` to see full error)'; + console.error(warning(`Checking for updates failed${suffix}`)); + + if (isDebugging) { + console.error(err); + } + } + + if (!update) { + return; + } + + console.log(`${chalk.bgRed('UPDATE AVAILABLE')} The latest version of \`serve\` is ${update.latest}`); +}; + +const getHelp = () => chalk` + {bold.cyan serve} - Static file serving and directory listing + + {bold USAGE} + + {bold $} {cyan serve} --help + {bold $} {cyan serve} --version + {bold $} {cyan serve} folder_name + {bold $} {cyan serve} [-l {underline listen_uri} [-l ...]] [{underline directory}] + + By default, {cyan serve} will listen on {bold 0.0.0.0:5000} and serve the + current working directory on that address. + + Specifying a single {bold --listen} argument will overwrite the default, not supplement it. 
+ + {bold OPTIONS} + + --help Shows this help message + + -v, --version Displays the current version of serve + + -l, --listen {underline listen_uri} Specify a URI endpoint on which to listen (see below) - + more than one may be specified to listen in multiple places + + -p Specify custom port + + -d, --debug Show debugging information + + -s, --single Rewrite all not-found requests to \`index.html\` + + -c, --config Specify custom path to \`serve.json\` + + -C, --cors Enable CORS, sets \`Access-Control-Allow-Origin\` to \`*\` + + -n, --no-clipboard Do not copy the local address to the clipboard + + -u, --no-compression Do not compress files + + --no-etag Send \`Last-Modified\` header instead of \`ETag\` + + -S, --symlinks Resolve symlinks instead of showing 404 errors + + --ssl-cert Optional path to an SSL/TLS certificate to serve with HTTPS + + --ssl-key Optional path to the SSL/TLS certificate\'s private key + + --ssl-pass Optional path to the SSL/TLS certificate\'s passphrase + + --no-port-switching Do not open a port other than the one specified when it\'s taken. + + {bold ENDPOINTS} + + Listen endpoints (specified by the {bold --listen} or {bold -l} options above) instruct {cyan serve} + to listen on one or more interfaces/ports, UNIX domain sockets, or Windows named pipes. + + For TCP ports on hostname "localhost": + + {bold $} {cyan serve} -l {underline 1234} + + For TCP (traditional host/port) endpoints: + + {bold $} {cyan serve} -l tcp://{underline hostname}:{underline 1234} + + For UNIX domain socket endpoints: + + {bold $} {cyan serve} -l unix:{underline /path/to/socket.sock} + + For Windows named pipe endpoints: + + {bold $} {cyan serve} -l pipe:\\\\.\\pipe\\{underline PipeName} +`; + +const parseEndpoint = (str) => { + if (!isNaN(str)) { + return [str]; + } + + // We cannot use `new URL` here, otherwise it will not + // parse the host properly and it would drop support for IPv6. + const url = parse(str); + + switch (url.protocol) { + case 'pipe:': { + // some special handling + const cutStr = str.replace(/^pipe:/, ''); + + if (cutStr.slice(0, 4) !== '\\\\.\\') { + throw new Error(`Invalid Windows named pipe endpoint: ${str}`); + } + + return [cutStr]; + } + case 'unix:': + if (!url.pathname) { + throw new Error(`Invalid UNIX domain socket endpoint: ${str}`); + } + + return [url.pathname]; + case 'tcp:': + url.port = url.port || '5000'; + return [parseInt(url.port, 10), url.hostname]; + default: + throw new Error(`Unknown --listen endpoint scheme (protocol): ${url.protocol}`); + } +}; + +const registerShutdown = (fn) => { + let run = false; + + const wrapper = () => { + if (!run) { + run = true; + fn(); + } + }; + + process.on('SIGINT', wrapper); + process.on('SIGTERM', wrapper); + process.on('exit', wrapper); +}; + +const getNetworkAddress = () => { + for (const name of Object.keys(interfaces)) { + for (const interface of interfaces[name]) { + const {address, family, internal} = interface; + if (family === 'IPv4' && !internal) { + return address; + } + } + } +}; + +const startEndpoint = (endpoint, config, args, previous) => { + const {isTTY} = process.stdout; + const clipboard = args['--no-clipboard'] !== true; + const compress = args['--no-compression'] !== true; + const httpMode = args['--ssl-cert'] && args['--ssl-key'] ? 
'https' : 'http'; + + const serverHandler = async (request, response) => { + if (args['--cors']) { + response.setHeader('Access-Control-Allow-Origin', '*'); + } + if (compress) { + await compressionHandler(request, response); + } + + return handler(request, response, config); + }; + + const sslPass = args['--ssl-pass']; + + const server = httpMode === 'https' + ? https.createServer({ + key: fs.readFileSync(args['--ssl-key']), + cert: fs.readFileSync(args['--ssl-cert']), + passphrase: sslPass ? fs.readFileSync(sslPass) : '' + }, serverHandler) + : http.createServer(serverHandler); + + server.on('error', (err) => { + if (err.code === 'EADDRINUSE' && endpoint.length === 1 && !isNaN(endpoint[0]) && args['--no-port-switching'] !== true) { + startEndpoint([0], config, args, endpoint[0]); + return; + } + + console.error(error(`Failed to serve: ${err.stack}`)); + process.exit(1); + }); + + server.listen(...endpoint, async () => { + const details = server.address(); + registerShutdown(() => server.close()); + + let localAddress = null; + let networkAddress = null; + + if (typeof details === 'string') { + localAddress = details; + } else if (typeof details === 'object' && details.port) { + const address = details.address === '::' ? 'localhost' : details.address; + const ip = getNetworkAddress(); + + localAddress = `${httpMode}://${address}:${details.port}`; + networkAddress = networkAddress ? `${httpMode}://${ip}:${details.port}` : null; + } + + if (isTTY && process.env.NODE_ENV !== 'production') { + let message = chalk.green('Serving!'); + + if (localAddress) { + const prefix = networkAddress ? '- ' : ''; + const space = networkAddress ? ' ' : ' '; + + message += `\n\n${chalk.bold(`${prefix}Local:`)}${space}${localAddress}`; + } + + if (networkAddress) { + message += `\n${chalk.bold('- On Your Network:')} ${networkAddress}`; + } + + if (previous) { + message += chalk.red(`\n\nThis port was picked because ${chalk.underline(previous)} is in use.`); + } + + if (clipboard) { + try { + await copy(localAddress); + message += `\n\n${chalk.grey('Copied local address to clipboard!')}`; + } catch (err) { + console.error(error(`Cannot copy to clipboard: ${err.message}`)); + } + } + + console.log(boxen(message, { + padding: 1, + borderColor: 'green', + margin: 1 + })); + } else { + const suffix = localAddress ? ` at ${localAddress}` : ''; + console.log(info(`Accepting connections${suffix}`)); + } + }); +}; + +const loadConfig = async (cwd, entry, args) => { + const files = [ + 'serve.json', + 'now.json', + 'package.json' + ]; + + if (args['--config']) { + files.unshift(args['--config']); + } + + const config = {}; + + for (const file of files) { + const location = path.resolve(entry, file); + let content = null; + + try { + content = await readFile(location, 'utf8'); + } catch (err) { + if (err.code === 'ENOENT') { + continue; + } + + console.error(error(`Not able to read ${location}: ${err.message}`)); + process.exit(1); + } + + try { + content = JSON.parse(content); + } catch (err) { + console.error(error(`Could not parse ${location} as JSON: ${err.message}`)); + process.exit(1); + } + + if (typeof content !== 'object') { + console.error(warning(`Didn't find a valid object in ${location}. 
Skipping...`)); + continue; + } + + try { + switch (file) { + case 'now.json': + content = content.static; + break; + case 'package.json': + content = content.now.static; + break; + } + } catch (err) { + continue; + } + + Object.assign(config, content); + console.log(info(`Discovered configuration in \`${file}\``)); + + if (file === 'now.json' || file === 'package.json') { + console.error(warning('The config files `now.json` and `package.json` are deprecated. Please use `serve.json`.')); + } + + break; + } + + if (entry) { + const {public} = config; + config.public = path.relative(cwd, (public ? path.resolve(entry, public) : entry)); + } + + if (Object.keys(config).length !== 0) { + const ajv = new Ajv(); + const validateSchema = ajv.compile(schema); + + if (!validateSchema(config)) { + const defaultMessage = error('The configuration you provided is wrong:'); + const {message, params} = validateSchema.errors[0]; + + console.error(`${defaultMessage}\n${message}\n${JSON.stringify(params)}`); + process.exit(1); + } + } + + // "ETag" headers are enabled by default unless `--no-etag` is provided + config.etag = !args['--no-etag']; + + return config; +}; + +(async () => { + let args = null; + + try { + args = arg({ + '--help': Boolean, + '--version': Boolean, + '--listen': [parseEndpoint], + '--single': Boolean, + '--debug': Boolean, + '--config': String, + '--no-clipboard': Boolean, + '--no-compression': Boolean, + '--no-etag': Boolean, + '--symlinks': Boolean, + '--cors': Boolean, + '--no-port-switching': Boolean, + '--ssl-cert': String, + '--ssl-key': String, + '--ssl-pass': String, + '-h': '--help', + '-v': '--version', + '-l': '--listen', + '-s': '--single', + '-d': '--debug', + '-c': '--config', + '-n': '--no-clipboard', + '-u': '--no-compression', + '-S': '--symlinks', + '-C': '--cors', + // This is deprecated and only for backwards-compatibility. + '-p': '--listen' + }); + } catch (err) { + console.error(error(err.message)); + process.exit(1); + } + + if (process.env.NO_UPDATE_CHECK !== '1') { + await updateCheck(args['--debug']); + } + + if (args['--version']) { + console.log(pkg.version); + return; + } + + if (args['--help']) { + console.log(getHelp()); + return; + } + + if (!args['--listen']) { + // Default endpoint + args['--listen'] = [[process.env.PORT || 5000]]; + } + + if (args._.length > 1) { + console.error(error('Please provide one path argument at maximum')); + process.exit(1); + } + + const cwd = process.cwd(); + const entry = args._.length > 0 ? path.resolve(args._[0]) : cwd; + + const config = await loadConfig(cwd, entry, args); + + if (args['--single']) { + const {rewrites} = config; + const existingRewrites = Array.isArray(rewrites) ? rewrites : []; + + // As the first rewrite rule, make `--single` work + config.rewrites = [{ + source: '**', + destination: '/index.html' + }, ...existingRewrites]; + } + + if (args['--symlinks']) { + config.symlinks = true; + } + + for (const endpoint of args['--listen']) { + startEndpoint(endpoint, config, args); + } + + registerShutdown(() => { + console.log(`\n${info('Gracefully shutting down. 
Please wait...')}`); + + process.on('SIGINT', () => { + console.log(`\n${warning('Force-closing all open sockets...')}`); + process.exit(0); + }); + }); +})(); diff --git a/src/server-handler.js b/src/server-handler.js new file mode 100644 index 0000000..05e3430 --- /dev/null +++ b/src/server-handler.js @@ -0,0 +1,756 @@ +// Native +const {promisify} = require('util'); +const path = require('path'); +const {createHash} = require('crypto'); +const {realpath, lstat, createReadStream, readdir} = require('fs'); + +// Packages +const url = require('fast-url-parser'); +const slasher = require('./glob-slash'); +const minimatch = require('minimatch'); +const pathToRegExp = require('path-to-regexp'); +const mime = require('mime-types'); +const bytes = require('bytes'); +const contentDisposition = require('content-disposition'); +const isPathInside = require('path-is-inside'); +const parseRange = require('range-parser'); + +// Other +const directoryTemplate = require('./directory'); +const errorTemplate = require('./error'); + +const etags = new Map(); + +const calculateSha = (handlers, absolutePath) => + new Promise((resolve, reject) => { + const hash = createHash('sha1'); + hash.update(path.extname(absolutePath)); + hash.update('-'); + const rs = handlers.createReadStream(absolutePath); + rs.on('error', reject); + rs.on('data', buf => hash.update(buf)); + rs.on('end', () => { + const sha = hash.digest('hex'); + resolve(sha); + }); + }); + +const sourceMatches = (source, requestPath, allowSegments) => { + const keys = []; + const slashed = slasher(source); + const resolvedPath = path.posix.resolve(requestPath); + + let results = null; + + if (allowSegments) { + const normalized = slashed.replace('*', '(.*)'); + const expression = pathToRegExp(normalized, keys); + + results = expression.exec(resolvedPath); + + if (!results) { + // clear keys so that they are not used + // later with empty results. this may + // happen if minimatch returns true + keys.length = 0; + } + } + + if (results || minimatch(resolvedPath, slashed)) { + return { + keys, + results + }; + } + + return null; +}; + +const toTarget = (source, destination, previousPath) => { + const matches = sourceMatches(source, previousPath, true); + + if (!matches) { + return null; + } + + const {keys, results} = matches; + + const props = {}; + const {protocol} = url.parse(destination); + const normalizedDest = protocol ? destination : slasher(destination); + const toPath = pathToRegExp.compile(normalizedDest); + + for (let index = 0; index < keys.length; index++) { + const {name} = keys[index]; + props[name] = results[index + 1]; + } + + return toPath(props); +}; + +const applyRewrites = (requestPath, rewrites = [], repetitive) => { + // We need to copy the array, since we're going to modify it. + const rewritesCopy = rewrites.slice(); + + // If the method was called again, the path was already rewritten + // so we need to make sure to return it. + const fallback = repetitive ? requestPath : null; + + if (rewritesCopy.length === 0) { + return fallback; + } + + for (let index = 0; index < rewritesCopy.length; index++) { + const {source, destination} = rewrites[index]; + const target = toTarget(source, destination, requestPath); + + if (target) { + // Remove rules that were already applied + rewritesCopy.splice(index, 1); + + // Check if there are remaining ones to be applied + return applyRewrites(slasher(target), rewritesCopy, true); + } + } + + return fallback; +}; + +const ensureSlashStart = target => (target.startsWith('/') ? 
target : `/${target}`); + +const shouldRedirect = (decodedPath, {redirects = [], trailingSlash}, cleanUrl) => { + const slashing = typeof trailingSlash === 'boolean'; + const defaultType = 301; + const matchHTML = /(\.html|\/index)$/g; + + if (redirects.length === 0 && !slashing && !cleanUrl) { + return null; + } + + // By stripping the HTML parts from the decoded + // path *before* handling the trailing slash, we make + // sure that only *one* redirect occurs if both + // config options are used. + if (cleanUrl && matchHTML.test(decodedPath)) { + decodedPath = decodedPath.replace(matchHTML, ''); + if (decodedPath.indexOf('//') > -1) { + decodedPath = decodedPath.replace(/\/+/g, '/'); + } + return { + target: ensureSlashStart(decodedPath), + statusCode: defaultType + }; + } + + if (slashing) { + const {ext, name} = path.parse(decodedPath); + const isTrailed = decodedPath.endsWith('/'); + const isDotfile = name.startsWith('.'); + + let target = null; + + if (!trailingSlash && isTrailed) { + target = decodedPath.slice(0, -1); + } else if (trailingSlash && !isTrailed && !ext && !isDotfile) { + target = `${decodedPath}/`; + } + + if (decodedPath.indexOf('//') > -1) { + target = decodedPath.replace(/\/+/g, '/'); + } + + if (target) { + return { + target: ensureSlashStart(target), + statusCode: defaultType + }; + } + } + + // This is currently the fastest way to + // iterate over an array + for (let index = 0; index < redirects.length; index++) { + const {source, destination, type} = redirects[index]; + const target = toTarget(source, destination, decodedPath); + + if (target) { + return { + target, + statusCode: type || defaultType + }; + } + } + + return null; +}; + +const appendHeaders = (target, source) => { + for (let index = 0; index < source.length; index++) { + const {key, value} = source[index]; + target[key] = value; + } +}; + +const getHeaders = async (handlers, config, current, absolutePath, stats) => { + const {headers: customHeaders = [], etag = false} = config; + const related = {}; + const {base} = path.parse(absolutePath); + const relativePath = path.relative(current, absolutePath); + + if (customHeaders.length > 0) { + // By iterating over all headers and never stopping, developers + // can specify multiple header sources in the config that + // might match a single path. + for (let index = 0; index < customHeaders.length; index++) { + const {source, headers} = customHeaders[index]; + + if (sourceMatches(source, slasher(relativePath))) { + appendHeaders(related, headers); + } + } + } + + let defaultHeaders = {}; + + if (stats) { + defaultHeaders = { + 'Content-Length': stats.size, + // Default to "inline", which always tries to render in the browser, + // if that's not working, it will save the file. But to be clear: This + // only happens if it cannot find a appropiate value. 
+ 'Content-Disposition': contentDisposition(base, { + type: 'inline' + }), + 'Accept-Ranges': 'bytes' + }; + + if (etag) { + let [mtime, sha] = etags.get(absolutePath) || []; + if (Number(mtime) !== Number(stats.mtime)) { + sha = await calculateSha(handlers, absolutePath); + etags.set(absolutePath, [stats.mtime, sha]); + } + defaultHeaders['ETag'] = `"${sha}"`; + } else { + defaultHeaders['Last-Modified'] = stats.mtime.toUTCString(); + } + + const contentType = mime.contentType(base); + + if (contentType) { + defaultHeaders['Content-Type'] = contentType; + } + } + + const headers = Object.assign(defaultHeaders, related); + + for (const key in headers) { + if (headers.hasOwnProperty(key) && headers[key] === null) { + delete headers[key]; + } + } + + return headers; +}; + +const applicable = (decodedPath, configEntry) => { + if (typeof configEntry === 'boolean') { + return configEntry; + } + + if (Array.isArray(configEntry)) { + for (let index = 0; index < configEntry.length; index++) { + const source = configEntry[index]; + + if (sourceMatches(source, decodedPath)) { + return true; + } + } + + return false; + } + + return true; +}; + +const getPossiblePaths = (relativePath, extension) => [ + path.join(relativePath, `index${extension}`), + relativePath.endsWith('/') ? relativePath.replace(/\/$/g, extension) : (relativePath + extension) +].filter(item => path.basename(item) !== extension); + +const findRelated = async (current, relativePath, rewrittenPath, originalStat) => { + const possible = rewrittenPath ? [rewrittenPath] : getPossiblePaths(relativePath, '.html'); + + let stats = null; + + for (let index = 0; index < possible.length; index++) { + const related = possible[index]; + const absolutePath = path.join(current, related); + + try { + stats = await originalStat(absolutePath); + } catch (err) { + if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { + throw err; + } + } + + if (stats) { + return { + stats, + absolutePath + }; + } + } + + return null; +}; + +const canBeListed = (excluded, file) => { + const slashed = slasher(file); + let whether = true; + + for (let mark = 0; mark < excluded.length; mark++) { + const source = excluded[mark]; + + if (sourceMatches(source, slashed)) { + whether = false; + break; + } + } + + return whether; +}; + +const renderDirectory = async (current, acceptsJSON, handlers, methods, config, paths) => { + const {directoryListing, trailingSlash, unlisted = [], renderSingle} = config; + const slashSuffix = typeof trailingSlash === 'boolean' ? (trailingSlash ? '/' : '') : '/'; + const {relativePath, absolutePath} = paths; + + const excluded = [ + '.DS_Store', + '.git', + ...unlisted + ]; + + if (!applicable(relativePath, directoryListing) && !renderSingle) { + return {}; + } + + let files = await handlers.readdir(absolutePath); + + const canRenderSingle = renderSingle && (files.length === 1); + + for (let index = 0; index < files.length; index++) { + const file = files[index]; + + const filePath = path.resolve(absolutePath, file); + const details = path.parse(filePath); + + // It's important to indicate that the `stat` call was + // spawned by the directory listing, as Now is + // simulating those calls and needs to special-case this. 
+ let stats = null; + + if (methods.lstat) { + stats = await handlers.lstat(filePath, true); + } else { + stats = await handlers.lstat(filePath); + } + + details.relative = path.join(relativePath, details.base); + + if (stats.isDirectory()) { + details.base += slashSuffix; + details.relative += slashSuffix; + details.type = 'folder'; + } else { + if (canRenderSingle) { + return { + singleFile: true, + absolutePath: filePath, + stats + }; + } + + details.ext = details.ext.split('.')[1] || 'txt'; + details.type = 'file'; + + details.size = bytes(stats.size, { + unitSeparator: ' ', + decimalPlaces: 0 + }); + } + + details.title = details.base; + + if (canBeListed(excluded, file)) { + files[index] = details; + } else { + delete files[index]; + } + } + + const toRoot = path.relative(current, absolutePath); + const directory = path.join(path.basename(current), toRoot, slashSuffix); + const pathParts = directory.split(path.sep).filter(Boolean); + + // Sort to list directories first, then sort alphabetically + files = files.sort((a, b) => { + const aIsDir = a.type === 'directory'; + const bIsDir = b.type === 'directory'; + + /* istanbul ignore next */ + if (aIsDir && !bIsDir) { + return -1; + } + + if ((bIsDir && !aIsDir) || (a.base > b.base)) { + return 1; + } + + /* istanbul ignore next */ + if (a.base < b.base) { + return -1; + } + + /* istanbul ignore next */ + return 0; + }).filter(Boolean); + + // Add parent directory to the head of the sorted files array + if (toRoot.length > 0) { + const directoryPath = [...pathParts].slice(1); + const relative = path.join('/', ...directoryPath, '..', slashSuffix); + + files.unshift({ + type: 'directory', + base: '..', + relative, + title: relative, + ext: '' + }); + } + + const subPaths = []; + + for (let index = 0; index < pathParts.length; index++) { + const parents = []; + const isLast = index === (pathParts.length - 1); + + let before = 0; + + while (before <= index) { + parents.push(pathParts[before]); + before++; + } + + parents.shift(); + + subPaths.push({ + name: pathParts[index] + (isLast ? slashSuffix : '/'), + url: index === 0 ? '' : parents.join('/') + slashSuffix + }); + } + + const spec = { + files, + directory, + paths: subPaths + }; + + const output = acceptsJSON ? 
JSON.stringify(spec) : directoryTemplate(spec); + + return {directory: output}; +}; + +const sendError = async (absolutePath, response, acceptsJSON, current, handlers, config, spec) => { + const {err: original, message, code, statusCode} = spec; + + /* istanbul ignore next */ + if (original && process.env.NODE_ENV !== 'test') { + console.error(original); + } + + response.statusCode = statusCode; + + if (acceptsJSON) { + response.setHeader('Content-Type', 'application/json; charset=utf-8'); + + response.end(JSON.stringify({ + error: { + code, + message + } + })); + + return; + } + + let stats = null; + + const errorPage = path.join(current, `${statusCode}.html`); + + try { + stats = await handlers.lstat(errorPage); + } catch (err) { + if (err.code !== 'ENOENT') { + console.error(err); + } + } + + if (stats) { + let stream = null; + + try { + stream = await handlers.createReadStream(errorPage); + + const headers = await getHeaders(handlers, config, current, errorPage, stats); + + response.writeHead(statusCode, headers); + stream.pipe(response); + + return; + } catch (err) { + console.error(err); + } + } + + const headers = await getHeaders(handlers, config, current, absolutePath, null); + headers['Content-Type'] = 'text/html; charset=utf-8'; + + response.writeHead(statusCode, headers); + response.end(errorTemplate({statusCode, message})); +}; + +const internalError = async (...args) => { + const lastIndex = args.length - 1; + const err = args[lastIndex]; + + args[lastIndex] = { + statusCode: 500, + code: 'internal_server_error', + message: 'A server error has occurred', + err + }; + + return sendError(...args); +}; + +const getHandlers = methods => Object.assign({ + lstat: promisify(lstat), + realpath: promisify(realpath), + createReadStream, + readdir: promisify(readdir), + sendError +}, methods); + +module.exports = async (request, response, config = {}, methods = {}) => { + const cwd = process.cwd(); + const current = config.public ? path.resolve(cwd, config.public) : cwd; + const handlers = getHandlers(methods); + + let relativePath = null; + let acceptsJSON = null; + + if (request.headers.accept) { + acceptsJSON = request.headers.accept.includes('application/json'); + } + + try { + relativePath = decodeURIComponent(url.parse(request.url).pathname); + } catch (err) { + return sendError('/', response, acceptsJSON, current, handlers, config, { + statusCode: 400, + code: 'bad_request', + message: 'Bad Request' + }); + } + + let absolutePath = path.join(current, relativePath); + + // Prevent path traversal vulnerabilities. We could do this + // by ourselves, but using the package covers all the edge cases. + if (!isPathInside(absolutePath, current)) { + return sendError(absolutePath, response, acceptsJSON, current, handlers, config, { + statusCode: 400, + code: 'bad_request', + message: 'Bad Request' + }); + } + + const cleanUrl = applicable(relativePath, config.cleanUrls); + const redirect = shouldRedirect(relativePath, config, cleanUrl); + + if (redirect) { + response.writeHead(redirect.statusCode, { + Location: encodeURI(redirect.target) + }); + + response.end(); + return; + } + + let stats = null; + + // It's extremely important that we're doing multiple stat calls. This one + // right here could technically be removed, but then the program + // would be slower. Because for directories, we always want to see if a related file + // exists and then (after that), fetch the directory itself if no + // related file was found. 
However (for files, of which most have extensions), we should + // always stat right away. + // + // When simulating a file system without directory indexes, calculating whether a + // directory exists requires loading all the file paths and then checking if + // one of them includes the path of the directory. As that's a very + // performance-expensive thing to do, we need to ensure it's not happening if not really necessary. + + if (path.extname(relativePath) !== '') { + try { + stats = await handlers.lstat(absolutePath); + } catch (err) { + if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { + return internalError(absolutePath, response, acceptsJSON, current, handlers, config, err); + } + } + } + + const rewrittenPath = applyRewrites(relativePath, config.rewrites); + + if (!stats && (cleanUrl || rewrittenPath)) { + try { + const related = await findRelated(current, relativePath, rewrittenPath, handlers.lstat); + + if (related) { + ({stats, absolutePath} = related); + } + } catch (err) { + if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { + return internalError(absolutePath, response, acceptsJSON, current, handlers, config, err); + } + } + } + + if (!stats) { + try { + stats = await handlers.lstat(absolutePath); + } catch (err) { + if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') { + return internalError(absolutePath, response, acceptsJSON, current, handlers, config, err); + } + } + } + + if (stats && stats.isDirectory()) { + let directory = null; + let singleFile = null; + + try { + const related = await renderDirectory(current, acceptsJSON, handlers, methods, config, { + relativePath, + absolutePath + }); + + if (related.singleFile) { + ({stats, absolutePath, singleFile} = related); + } else { + ({directory} = related); + } + } catch (err) { + if (err.code !== 'ENOENT') { + return internalError(absolutePath, response, acceptsJSON, current, handlers, config, err); + } + } + + if (directory) { + const contentType = acceptsJSON ? 'application/json; charset=utf-8' : 'text/html; charset=utf-8'; + + response.statusCode = 200; + response.setHeader('Content-Type', contentType); + response.end(directory); + + return; + } + + if (!singleFile) { + // The directory listing is disabled, so we want to + // render a 404 error. + stats = null; + } + } + + const isSymLink = stats && stats.isSymbolicLink(); + + // There are two scenarios in which we want to reply with + // a 404 error: Either the path does not exist, or it is a + // symlink while the `symlinks` option is disabled (which it is by default). + if (!stats || (!config.symlinks && isSymLink)) { + // allow for custom 404 handling + return handlers.sendError(absolutePath, response, acceptsJSON, current, handlers, config, { + statusCode: 404, + code: 'not_found', + message: 'The requested path could not be found' + }); + } + + // If we figured out that the target is a symlink, we need to + // resolve the symlink and run a new `stat` call just for the + // target of that symlink. + if (isSymLink) { + absolutePath = await handlers.realpath(absolutePath); + stats = await handlers.lstat(absolutePath); + } + + const streamOpts = {}; + + // TODO ? 
if-range + if (request.headers.range && stats.size) { + const range = parseRange(stats.size, request.headers.range); + + if (typeof range === 'object' && range.type === 'bytes') { + const {start, end} = range[0]; + + streamOpts.start = start; + streamOpts.end = end; + + response.statusCode = 206; + } else { + response.statusCode = 416; + response.setHeader('Content-Range', `bytes */${stats.size}`); + } + } + + // TODO ? multiple ranges + + let stream = null; + + try { + stream = await handlers.createReadStream(absolutePath, streamOpts); + } catch (err) { + return internalError(absolutePath, response, acceptsJSON, current, handlers, config, err); + } + + const headers = await getHeaders(handlers, config, current, absolutePath, stats); + + // eslint-disable-next-line no-undefined + if (streamOpts.start !== undefined && streamOpts.end !== undefined) { + headers['Content-Range'] = `bytes ${streamOpts.start}-${streamOpts.end}/${stats.size}`; + headers['Content-Length'] = streamOpts.end - streamOpts.start + 1; + } + + // We need to check for `headers.ETag` being truthy first, otherwise it will + // match `undefined` being equal to `undefined`, which is true. + // + // Checking for `undefined` and `null` is also important, because `Range` can be `0`. + // + // eslint-disable-next-line no-eq-null + if (request.headers.range == null && headers.ETag && headers.ETag === request.headers['if-none-match']) { + response.statusCode = 304; + response.end(); + + return; + } + + response.writeHead(response.statusCode || 200, headers); + stream.pipe(response); +};
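The handler exported by `src/server-handler.js` above takes `(request, response, config, methods)`, where the optional `methods` object can override the file-system access it uses internally. A minimal usage sketch (not part of this commit; the folder name and port are assumptions) mirroring what `src/serve.js` sets up for `--single`:

```js
// sketch: mounting src/server-handler.js on a plain Node HTTP server
const http = require('http');
const handler = require('./src/server-handler');

// Roughly what `serve --single dist` builds in src/serve.js:
// unknown paths are rewritten to /index.html, ETags are enabled by default.
const config = {
  public: 'dist',
  etag: true,
  rewrites: [{source: '**', destination: '/index.html'}]
};

const server = http.createServer((request, response) => {
  // A fourth argument may supply custom fs methods, e.g.
  // {lstat, realpath, createReadStream, readdir, sendError}.
  return handler(request, response, config);
});

server.listen(3000, () => {
  console.log('Listening at http://localhost:3000');
});
```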