MinsoftK

deleting

Showing 1000 changed files with 0 additions and 4694 deletions


1 -/*
2 - Copyright 2018 Google LLC
3 -
4 - Use of this source code is governed by an MIT-style
5 - license that can be found in the LICENSE file or at
6 - https://opensource.org/licenses/MIT.
7 -*/
8 -
9 -import {assert} from 'workbox-core/_private/assert.js';
10 -import {timeout} from 'workbox-core/_private/timeout.js';
11 -import {resultingClientExists} from 'workbox-core/_private/resultingClientExists.js';
12 -import {CacheDidUpdateCallbackParam} from 'workbox-core/types.js';
13 -import {logger} from 'workbox-core/_private/logger.js';
14 -import {responsesAreSame} from './responsesAreSame.js';
15 -import {CACHE_UPDATED_MESSAGE_TYPE, CACHE_UPDATED_MESSAGE_META, DEFAULT_HEADERS_TO_CHECK} from './utils/constants.js';
16 -
17 -import './_version.js';
18 -
19 -
20 -// UA-sniff Safari: https://stackoverflow.com/questions/7944460/detect-safari-browser
21 -// TODO(philipwalton): remove once this Safari bug fix has been released.
22 -// https://bugs.webkit.org/show_bug.cgi?id=201169
23 -const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
24 -
25 -
26 -// Give TypeScript the correct global.
27 -declare let self: ServiceWorkerGlobalScope;
28 -
29 -export interface BroadcastCacheUpdateOptions {
30 - headersToCheck?: string[];
31 - generatePayload?: (options: CacheDidUpdateCallbackParam) => Record<string, any>;
32 -}
33 -
34 -/**
35 - * Generates the default payload used in update messages. By default the
36 - * payload includes the `cacheName` and `updatedURL` fields.
37 - *
38 - * @return Object
39 - * @private
40 - */
41 -function defaultPayloadGenerator(data: CacheDidUpdateCallbackParam): Record<string, any> {
42 - return {
43 - cacheName: data.cacheName,
44 - updatedURL: data.request.url,
45 - };
46 -}
47 -
48 -/**
49 - * Uses the `postMessage()` API to inform any open windows/tabs when a cached
50 - * response has been updated.
51 - *
52 - * For efficiency's sake, the underlying response bodies are not compared;
53 - * only specific response headers are checked.
54 - *
55 - * @memberof module:workbox-broadcast-update
56 - */
57 -class BroadcastCacheUpdate {
58 - private readonly _headersToCheck: string[];
59 - private readonly _generatePayload: (options: CacheDidUpdateCallbackParam) => Record<string, any>;
60 -
61 - /**
62 - * Construct a BroadcastCacheUpdate instance with the passed options, which
63 - * control how cache update messages are generated and broadcast.
64 - *
65 - * @param {Object} options
66 - * @param {Array<string>} [options.headersToCheck=['content-length', 'etag', 'last-modified']]
67 - * A list of headers that will be used to determine whether the responses
68 - * differ.
69 - * @param {Function} [options.generatePayload] A function whose return value
70 - * will be used as the `payload` field in any cache update messages sent
71 - * to the window clients.
72 - */
73 - constructor({
74 - headersToCheck,
75 - generatePayload,
76 - }: BroadcastCacheUpdateOptions = {}) {
77 - this._headersToCheck = headersToCheck || DEFAULT_HEADERS_TO_CHECK;
78 - this._generatePayload = generatePayload || defaultPayloadGenerator;
79 - }
80 -
81 - /**
82 - * Compares two [Responses](https://developer.mozilla.org/en-US/docs/Web/API/Response)
83 - * and sends a message (via `postMessage()`) to all window clients if the
84 - * responses differ (note: neither of the Responses can be
85 - * {@link http://stackoverflow.com/questions/39109789|opaque}).
86 - *
87 - * The message that's posted has the following format (where `payload` can
88 - * be customized via the `generatePayload` option the instance is created
89 - * with):
90 - *
91 - * ```
92 - * {
93 - * type: 'CACHE_UPDATED',
94 - * meta: 'workbox-broadcast-update',
95 - * payload: {
96 - * cacheName: 'the-cache-name',
97 - * updatedURL: 'https://example.com/'
98 - * }
99 - * }
100 - * ```
101 - *
102 - * @param {Object} options
103 - * @param {Response} [options.oldResponse] Cached response to compare.
104 - * @param {Response} options.newResponse Possibly updated response to compare.
105 - * @param {Request} options.request The request.
106 - * @param {string} options.cacheName Name of the cache the responses belong
107 - * to. This is included in the broadcast message.
108 - * @param {Event} [options.event] An optional event that triggered
109 - * this possible cache update.
110 - * @return {Promise} Resolves once the update is sent.
111 - */
112 - async notifyIfUpdated(options: CacheDidUpdateCallbackParam): Promise<void> {
113 - if (process.env.NODE_ENV !== 'production') {
114 - assert!.isType(options.cacheName, 'string', {
115 - moduleName: 'workbox-broadcast-update',
116 - className: 'BroadcastCacheUpdate',
117 - funcName: 'notifyIfUpdated',
118 - paramName: 'cacheName',
119 - });
120 - assert!.isInstance(options.newResponse, Response, {
121 - moduleName: 'workbox-broadcast-update',
122 - className: 'BroadcastCacheUpdate',
123 - funcName: 'notifyIfUpdated',
124 - paramName: 'newResponse',
125 - });
126 - assert!.isInstance(options.request, Request, {
127 - moduleName: 'workbox-broadcast-update',
128 - className: 'BroadcastCacheUpdate',
129 - funcName: 'notifyIfUpdated',
130 - paramName: 'request',
131 - });
132 - }
133 -
134 - // Without two responses there is nothing to compare.
135 - if (!options.oldResponse) {
136 - return;
137 - }
138 -
139 - if (!responsesAreSame(options.oldResponse, options.newResponse, this._headersToCheck)) {
140 - if (process.env.NODE_ENV !== 'production') {
141 - logger.log(
142 - `Newer response found (and cached) for:`, options.request.url);
143 - }
144 -
145 - const messageData = {
146 - type: CACHE_UPDATED_MESSAGE_TYPE,
147 - meta: CACHE_UPDATED_MESSAGE_META,
148 - payload: this._generatePayload(options),
149 - };
150 -
151 - // For navigation requests, wait until the new window client exists
152 - // before sending the message
153 - if (options.request.mode === 'navigate') {
154 - let resultingClientId: string | undefined;
155 - if (options.event instanceof FetchEvent) {
156 - resultingClientId = options.event.resultingClientId;
157 - }
158 -
159 - const resultingWin = await resultingClientExists(resultingClientId);
160 -
161 - // Safari does not currently implement postMessage buffering and
162 - // there's no good way to feature detect that, so to increase the
163 - // chances of the message being delivered in Safari, we add a timeout.
164 - // We also do this if `resultingClientExists()` didn't return a client,
165 - // which means it timed out, so it's worth waiting a bit longer.
166 - if (!resultingWin || isSafari) {
167 - // 3500 is chosen because (according to CrUX data) 80% of mobile
168 - // websites hit the DOMContentLoaded event in less than 3.5 seconds.
169 - // And presumably sites implementing service worker are on the
170 - // higher end of the performance spectrum.
171 - await timeout(3500);
172 - }
173 - }
174 -
175 - const windows = await self.clients.matchAll({type: 'window'});
176 - for (const win of windows) {
177 - win.postMessage(messageData);
178 - }
179 - }
180 - }
181 -}
182 -
183 -export {BroadcastCacheUpdate};
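
For reference, a minimal page-side sketch that consumes the `CACHE_UPDATED` messages documented above. The logging and any follow-up refresh behaviour are illustrative assumptions, not part of the deleted module:

```js
// In the window (not the service worker): listen for the broadcast messages
// that BroadcastCacheUpdate posts via client.postMessage().
navigator.serviceWorker.addEventListener('message', (event) => {
  const {type, meta, payload} = event.data || {};
  if (type === 'CACHE_UPDATED' && meta === 'workbox-broadcast-update') {
    // payload matches defaultPayloadGenerator(): {cacheName, updatedURL}.
    console.log(`Cache ${payload.cacheName} has a newer copy of ${payload.updatedURL}`);
    // An app might prompt the user to reload here.
  }
});
```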
1 -/*
2 - Copyright 2018 Google LLC
3 -
4 - Use of this source code is governed by an MIT-style
5 - license that can be found in the LICENSE file or at
6 - https://opensource.org/licenses/MIT.
7 -*/
8 -
9 -import {dontWaitFor} from 'workbox-core/_private/dontWaitFor.js';
10 -import {WorkboxPlugin} from 'workbox-core/types.js';
11 -
12 -import {BroadcastCacheUpdate, BroadcastCacheUpdateOptions} from './BroadcastCacheUpdate.js';
13 -
14 -import './_version.js';
15 -
16 -/**
17 - * This plugin will automatically broadcast a message whenever a cached response
18 - * is updated.
19 - *
20 - * @memberof module:workbox-broadcast-update
21 - */
22 -class BroadcastUpdatePlugin implements WorkboxPlugin {
23 - private readonly _broadcastUpdate: BroadcastCacheUpdate;
24 -
25 - /**
26 - * Constructs a BroadcastCacheUpdate instance with the passed options and
27 - * calls its [`notifyIfUpdated()`]{@link module:workbox-broadcast-update.BroadcastCacheUpdate~notifyIfUpdated}
28 - * method whenever the plugin's `cacheDidUpdate` callback is invoked.
29 - *
30 - * @param {Object} options
31 - * @param {Array<string>} [options.headersToCheck=['content-length', 'etag', 'last-modified']]
32 - * A list of headers that will be used to determine whether the responses
33 - * differ.
34 - * @param {Function} [options.generatePayload] A function whose return value
35 - * will be used as the `payload` field in any cache update messages sent
36 - * to the window clients.
37 - */
38 - constructor(options: BroadcastCacheUpdateOptions) {
39 - this._broadcastUpdate = new BroadcastCacheUpdate(options);
40 - }
41 -
42 - /**
43 - * A "lifecycle" callback that will be triggered automatically by the
44 - * `workbox-sw` and `workbox-runtime-caching` handlers when an entry is
45 - * added to a cache.
46 - *
47 - * @private
48 - * @param {Object} options The input object to this function.
49 - * @param {string} options.cacheName Name of the cache being updated.
50 - * @param {Response} [options.oldResponse] The previous cached value, if any.
51 - * @param {Response} options.newResponse The new value in the cache.
52 - * @param {Request} options.request The request that triggered the update.
53 - * @param {Event} [options.event] The event that triggered the update.
54 - */
55 - cacheDidUpdate: WorkboxPlugin['cacheDidUpdate'] = async (options) => {
56 - dontWaitFor(this._broadcastUpdate.notifyIfUpdated(options));
57 - }
58 -}
59 -
60 -export {BroadcastUpdatePlugin};
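
A minimal service-worker-side sketch showing how the deleted plugin is typically wired into a caching strategy (Workbox v5 `workbox-routing`/`workbox-strategies` imports assumed; the route match, cache name, and header list are illustrative):

```js
import {registerRoute} from 'workbox-routing';
import {StaleWhileRevalidate} from 'workbox-strategies';
import {BroadcastUpdatePlugin} from 'workbox-broadcast-update';

registerRoute(
  // Illustrative route: same-origin API responses.
  ({url}) => url.pathname.startsWith('/api/'),
  new StaleWhileRevalidate({
    cacheName: 'api-cache',
    plugins: [
      // Broadcast a CACHE_UPDATED message whenever a cached entry changes.
      new BroadcastUpdatePlugin({headersToCheck: ['etag', 'last-modified']}),
    ],
  }),
);
```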
1 -// @ts-ignore
2 -try{self['workbox:broadcast-update:5.1.4']&&_()}catch(e){}
\ No newline at end of file
1 -/*
2 - Copyright 2018 Google LLC
3 -
4 - Use of this source code is governed by an MIT-style
5 - license that can be found in the LICENSE file or at
6 - https://opensource.org/licenses/MIT.
7 -*/
8 -
9 -import {BroadcastCacheUpdate} from './BroadcastCacheUpdate.js';
10 -import {BroadcastUpdatePlugin} from './BroadcastUpdatePlugin.js';
11 -import {responsesAreSame} from './responsesAreSame.js';
12 -import './_version.js';
13 -
14 -
15 -/**
16 - * @module workbox-broadcast-update
17 - */
18 -
19 -export {
20 - BroadcastCacheUpdate,
21 - BroadcastUpdatePlugin,
22 - responsesAreSame,
23 -};
1 -/*
2 - Copyright 2018 Google LLC
3 -
4 - Use of this source code is governed by an MIT-style
5 - license that can be found in the LICENSE file or at
6 - https://opensource.org/licenses/MIT.
7 -*/
8 -
9 -import {WorkboxError} from 'workbox-core/_private/WorkboxError.js';
10 -import {logger} from 'workbox-core/_private/logger.js';
11 -import './_version.js';
12 -
13 -
14 -/**
15 - * Given two `Response` objects, compares several header values to see if they are
16 - * the same or not.
17 - *
18 - * @param {Response} firstResponse
19 - * @param {Response} secondResponse
20 - * @param {Array<string>} headersToCheck
21 - * @return {boolean}
22 - *
23 - * @memberof module:workbox-broadcast-update
24 - */
25 -const responsesAreSame = (
26 - firstResponse: Response,
27 - secondResponse: Response,
28 - headersToCheck: string[],
29 -) => {
30 - if (process.env.NODE_ENV !== 'production') {
31 - if (!(firstResponse instanceof Response &&
32 - secondResponse instanceof Response)) {
33 - throw new WorkboxError('invalid-responses-are-same-args');
34 - }
35 - }
36 -
37 - const atLeastOneHeaderAvailable = headersToCheck.some((header) => {
38 - return firstResponse.headers.has(header) &&
39 - secondResponse.headers.has(header);
40 - });
41 -
42 - if (!atLeastOneHeaderAvailable) {
43 - if (process.env.NODE_ENV !== 'production') {
44 - logger.warn(`Unable to determine whether the response has been updated ` +
45 - `because none of the headers that would be checked are present.`);
46 - logger.debug(`Attempting to compare the following: `,
47 - firstResponse, secondResponse, headersToCheck);
48 - }
49 -
50 - // Just return true, indicating that the responses are the same, since we
51 - // can't determine otherwise.
52 - return true;
53 - }
54 -
55 - return headersToCheck.every((header) => {
56 - const headerStateComparison = firstResponse.headers.has(header) ===
57 - secondResponse.headers.has(header);
58 - const headerValueComparison = firstResponse.headers.get(header) ===
59 - secondResponse.headers.get(header);
60 -
61 - return headerStateComparison && headerValueComparison;
62 - });
63 -};
64 -
65 -export {responsesAreSame};
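
A small sketch of the exported helper with synthetic responses (the header values are made up for illustration):

```js
import {responsesAreSame} from 'workbox-broadcast-update';

const oldResponse = new Response('', {headers: {'etag': 'W/"abc"'}});
const newResponse = new Response('', {headers: {'etag': 'W/"def"'}});

// false: the `etag` values differ, so the caller would treat this as an update.
responsesAreSame(oldResponse, newResponse, ['content-length', 'etag', 'last-modified']);
```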
1 -/*
2 - Copyright 2018 Google LLC
3 -
4 - Use of this source code is governed by an MIT-style
5 - license that can be found in the LICENSE file or at
6 - https://opensource.org/licenses/MIT.
7 -*/
8 -
9 -import '../_version.js';
10 -
11 -export const CACHE_UPDATED_MESSAGE_TYPE = 'CACHE_UPDATED';
12 -export const CACHE_UPDATED_MESSAGE_META = 'workbox-broadcast-update';
13 -export const DEFAULT_HEADERS_TO_CHECK: string[] = [
14 - 'content-length',
15 - 'etag',
16 - 'last-modified',
17 -];
1 -{
2 - "extends": "../../tsconfig",
3 - "compilerOptions": {
4 - "outDir": "./",
5 - "rootDir": "./src",
6 - "tsBuildInfoFile": "./tsconfig.tsbuildinfo"
7 - },
8 - "include": [
9 - "src/**/*.ts"
10 - ],
11 - "references": [
12 - { "path": "../workbox-core/" }
13 - ]
14 -}
1 -import '../_version.js';
2 -export declare const CACHE_UPDATED_MESSAGE_TYPE = "CACHE_UPDATED";
3 -export declare const CACHE_UPDATED_MESSAGE_META = "workbox-broadcast-update";
4 -export declare const DEFAULT_HEADERS_TO_CHECK: string[];
1 -/*
2 - Copyright 2018 Google LLC
3 -
4 - Use of this source code is governed by an MIT-style
5 - license that can be found in the LICENSE file or at
6 - https://opensource.org/licenses/MIT.
7 -*/
8 -import '../_version.js';
9 -export const CACHE_UPDATED_MESSAGE_TYPE = 'CACHE_UPDATED';
10 -export const CACHE_UPDATED_MESSAGE_META = 'workbox-broadcast-update';
11 -export const DEFAULT_HEADERS_TO_CHECK = [
12 - 'content-length',
13 - 'etag',
14 - 'last-modified',
15 -];
1 -export * from './constants.js';
\ No newline at end of file
1 -Copyright 2018 Google LLC
2 -
3 -Permission is hereby granted, free of charge, to any person obtaining a copy
4 -of this software and associated documentation files (the "Software"), to deal
5 -in the Software without restriction, including without limitation the rights
6 -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 -copies of the Software, and to permit persons to whom the Software is
8 -furnished to do so, subject to the following conditions:
9 -
10 -The above copyright notice and this permission notice shall be included in
11 -all copies or substantial portions of the Software.
12 -
13 -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 -THE SOFTWARE.
1 -This module's documentation can be found at https://developers.google.com/web/tools/workbox/modules/workbox-build
1 -"use strict";
2 -
3 -require("./_version.mjs");
\ No newline at end of file
1 -{
2 - "origin": "https://storage.googleapis.com",
3 - "bucketName": "workbox-cdn",
4 - "releasesDir": "releases",
5 - "latestVersion": "5.1.4"
6 -}
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const getFileManifestEntries = require('./lib/get-file-manifest-entries');
11 -
12 -const getManifestSchema = require('./options/schema/get-manifest');
13 -
14 -const validate = require('./lib/validate-options'); // eslint-disable-next-line jsdoc/newline-after-description
15 -
16 -/**
17 - * This method returns a list of URLs to precache, referred to as a "precache
18 - * manifest", along with details about the number of entries and their size,
19 - * based on the options you provide.
20 - *
21 - * @param {Object} config The configuration to use.
22 - *
23 - * @param {string} config.globDirectory The local directory you wish to match
24 - * `globPatterns` against. The path is relative to the current directory.
25 - *
26 - * @param {Array<module:workbox-build.ManifestEntry>} [config.additionalManifestEntries]
27 - * A list of entries to be precached, in addition to any entries that are
28 - * generated as part of the build configuration.
29 - *
30 - * @param {RegExp} [config.dontCacheBustURLsMatching] Assets that match this will be
31 - * assumed to be uniquely versioned via their URL, and exempted from the normal
32 - * HTTP cache-busting that's done when populating the precache. While not
33 - * required, it's recommended that if your existing build process already
34 - * inserts a `[hash]` value into each filename, you provide a RegExp that will
35 - * detect that, as it will reduce the bandwidth consumed when precaching.
36 - *
37 - * @param {boolean} [config.globFollow=true] Determines whether or not symlinks
38 - * are followed when generating the precache manifest. For more information, see
39 - * the definition of `follow` in the `glob`
40 - * [documentation](https://github.com/isaacs/node-glob#options).
41 - *
42 - * @param {Array<string>} [config.globIgnores=['node_modules/**']]
43 - * A set of patterns matching files to always exclude when generating the
44 - * precache manifest. For more information, see the definition of `ignore` in the `glob`
45 - * [documentation](https://github.com/isaacs/node-glob#options).
46 - *
47 - * @param {Array<string>} [config.globPatterns=['**/*.{js,css,html}']]
48 - * Files matching any of these patterns will be included in the precache
49 - * manifest. For more information, see the
50 - * [`glob` primer](https://github.com/isaacs/node-glob#glob-primer).
51 - *
52 - * @param {boolean} [config.globStrict=true] If true, an error reading a directory when
53 - * generating a precache manifest will cause the build to fail. If false, the
54 - * problematic directory will be skipped. For more information, see the
55 - * definition of `strict` in the `glob`
56 - * [documentation](https://github.com/isaacs/node-glob#options).
57 - *
58 - * @param {Array<module:workbox-build.ManifestTransform>} [config.manifestTransforms] One or more
59 - * functions which will be applied sequentially against the generated manifest.
60 - * If `modifyURLPrefix` or `dontCacheBustURLsMatching` are also specified, their
61 - * corresponding transformations will be applied first.
62 - *
63 - * @param {number} [config.maximumFileSizeToCacheInBytes=2097152] This value can be
64 - * used to determine the maximum size of files that will be precached. This
65 - * prevents you from inadvertently precaching very large files that might have
66 - * accidentally matched one of your patterns.
67 - *
68 - * @param {string} [config.mode='production'] If set to 'production', then an
69 - * optimized service worker bundle that excludes debugging info will be
70 - * produced. If not explicitly configured here, the `process.env.NODE_ENV` value
71 - * will be used, and failing that, it will fall back to `'production'`.
72 - *
73 - * @param {object<string, string>} [config.modifyURLPrefix] A mapping of prefixes
74 - * that, if present in an entry in the precache manifest, will be replaced with
75 - * the corresponding value. This can be used to, for example, remove or add a
76 - * path prefix from a manifest entry if your web hosting setup doesn't match
77 - * your local filesystem setup. As an alternative with more flexibility, you can
78 - * use the `manifestTransforms` option and provide a function that modifies the
79 - * entries in the manifest using whatever logic you provide.
80 - *
81 - * @param {Object} [config.templatedURLs] If a URL is rendered based on some
82 - * server-side logic, its contents may depend on multiple files or on some other
83 - * unique string value. The keys in this object are server-rendered URLs. If the
84 - * values are an array of strings, they will be interpreted as `glob` patterns,
85 - * and the contents of any files matching the patterns will be used to uniquely
86 - * version the URL. If used with a single string, it will be interpreted as
87 - * unique versioning information that you've generated for a given URL.
88 - *
89 - * @return {Promise<{count: number, manifestEntries: Array<module:workbox-build.ManifestEntry>, size: number, warnings: Array<string>}>}
90 - * A promise that resolves once the precache manifest (available in the
91 - * `manifestEntries` property) has been determined. The `size` property
92 - * contains the aggregate size of all the precached entries, in bytes, and the
93 - * `count` property contains the total number of precached entries. Any
94 - * non-fatal warning messages will be returned via `warnings`.
95 - *
96 - * @memberof module:workbox-build
97 - */
98 -
99 -
100 -async function getManifest(config) {
101 - const options = validate(config, getManifestSchema);
102 - const {
103 - manifestEntries,
104 - count,
105 - size,
106 - warnings
107 - } = await getFileManifestEntries(options);
108 - return {
109 - manifestEntries,
110 - count,
111 - size,
112 - warnings
113 - };
114 -}
115 -
116 -module.exports = getManifest;
\ No newline at end of file
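
A minimal sketch of calling `getManifest()` with the options documented above; the directory and glob patterns are illustrative assumptions:

```js
const {getManifest} = require('workbox-build');

getManifest({
  globDirectory: 'dist/',
  globPatterns: ['**/*.{js,css,html}'],
  maximumFileSizeToCacheInBytes: 2 * 1024 * 1024,
}).then(({manifestEntries, count, size, warnings}) => {
  warnings.forEach((warning) => console.warn(warning));
  // manifestEntries is an array of {url, revision} objects.
  console.log(`Found ${count} precache entries, ${size} bytes in total.`);
});
```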
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const {
11 - getModuleURL
12 -} = require('./lib/cdn-utils');
13 -
14 -const copyWorkboxLibraries = require('./lib/copy-workbox-libraries');
15 -
16 -const generateSW = require('./generate-sw');
17 -
18 -const getManifest = require('./get-manifest');
19 -
20 -const injectManifest = require('./inject-manifest');
21 -/**
22 - * @module workbox-build
23 - */
24 -
25 -
26 -module.exports = {
27 - copyWorkboxLibraries,
28 - generateSW,
29 - getManifest,
30 - getModuleURL,
31 - injectManifest
32 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const assert = require('assert');
11 -
12 -const fse = require('fs-extra');
13 -
14 -const sourceMapURL = require('source-map-url');
15 -
16 -const stringify = require('fast-json-stable-stringify');
17 -
18 -const upath = require('upath');
19 -
20 -const errors = require('./lib/errors');
21 -
22 -const escapeRegexp = require('./lib/escape-regexp');
23 -
24 -const getFileManifestEntries = require('./lib/get-file-manifest-entries');
25 -
26 -const injectManifestSchema = require('./options/schema/inject-manifest');
27 -
28 -const rebasePath = require('./lib/rebase-path');
29 -
30 -const replaceAndUpdateSourceMap = require('./lib/replace-and-update-source-map');
31 -
32 -const validate = require('./lib/validate-options'); // eslint-disable-next-line jsdoc/newline-after-description
33 -
34 -/**
35 - * This method creates a list of URLs to precache, referred to as a "precache
36 - * manifest", based on the options you provide.
37 - *
38 - * The manifest is injected into the `swSrc` file, and the placeholder string
39 - * `injectionPoint` determines where in the file the manifest should go.
40 - *
41 - * The final service worker file, with the manifest injected, is written to
42 - * disk at `swDest`.
43 - *
44 - * @param {Object} config The configuration to use.
45 - *
46 - * @param {string} config.globDirectory The local directory you wish to match
47 - * `globPatterns` against. The path is relative to the current directory.
48 - *
49 - * @param {string} config.swDest The path and filename of the service worker file
50 - * that will be created by the build process, relative to the current working
51 - * directory. It must end in '.js'.
52 - *
53 - * @param {string} config.swSrc The path and filename of the service worker file
54 - * that will be read during the build process, relative to the current working
55 - * directory.
56 - *
57 - * @param {Array<module:workbox-build.ManifestEntry>} [config.additionalManifestEntries]
58 - * A list of entries to be precached, in addition to any entries that are
59 - * generated as part of the build configuration.
60 - *
61 - * @param {RegExp} [config.dontCacheBustURLsMatching] Assets that match this will be
62 - * assumed to be uniquely versioned via their URL, and exempted from the normal
63 - * HTTP cache-busting that's done when populating the precache. While not
64 - * required, it's recommended that if your existing build process already
65 - * inserts a `[hash]` value into each filename, you provide a RegExp that will
66 - * detect that, as it will reduce the bandwidth consumed when precaching.
67 - *
68 - * @param {boolean} [config.globFollow=true] Determines whether or not symlinks
69 - * are followed when generating the precache manifest. For more information, see
70 - * the definition of `follow` in the `glob`
71 - * [documentation](https://github.com/isaacs/node-glob#options).
72 - *
73 - * @param {Array<string>} [config.globIgnores=['node_modules/**']]
74 - * A set of patterns matching files to always exclude when generating the
75 - * precache manifest. For more information, see the definition of `ignore` in the `glob`
76 - * [documentation](https://github.com/isaacs/node-glob#options).
77 - *
78 - * @param {Array<string>} [config.globPatterns=['**/*.{js,css,html}']]
79 - * Files matching any of these patterns will be included in the precache
80 - * manifest. For more information, see the
81 - * [`glob` primer](https://github.com/isaacs/node-glob#glob-primer).
82 - *
83 - * @param {boolean} [config.globStrict=true] If true, an error reading a directory when
84 - * generating a precache manifest will cause the build to fail. If false, the
85 - * problematic directory will be skipped. For more information, see the
86 - * definition of `strict` in the `glob`
87 - * [documentation](https://github.com/isaacs/node-glob#options).
88 - *
89 - * @param {string} [config.injectionPoint='self.__WB_MANIFEST'] The string to
90 - * find inside of the `swSrc` file. Once found, it will be replaced by the
91 - * generated precache manifest.
92 - *
93 - * @param {Array<module:workbox-build.ManifestTransform>} [config.manifestTransforms] One or more
94 - * functions which will be applied sequentially against the generated manifest.
95 - * If `modifyURLPrefix` or `dontCacheBustURLsMatching` are also specified, their
96 - * corresponding transformations will be applied first.
97 - *
98 - * @param {number} [config.maximumFileSizeToCacheInBytes=2097152] This value can be
99 - * used to determine the maximum size of files that will be precached. This
100 - * prevents you from inadvertently precaching very large files that might have
101 - * accidentally matched one of your patterns.
102 - *
103 - * @param {string} [config.mode='production'] If set to 'production', then an
104 - * optimized service worker bundle that excludes debugging info will be
105 - * produced. If not explicitly configured here, the `process.env.NODE_ENV` value
106 - * will be used, and failing that, it will fall back to `'production'`.
107 - *
108 - * @param {object<string, string>} [config.modifyURLPrefix] A mapping of prefixes
109 - * that, if present in an entry in the precache manifest, will be replaced with
110 - * the corresponding value. This can be used to, for example, remove or add a
111 - * path prefix from a manifest entry if your web hosting setup doesn't match
112 - * your local filesystem setup. As an alternative with more flexibility, you can
113 - * use the `manifestTransforms` option and provide a function that modifies the
114 - * entries in the manifest using whatever logic you provide.
115 - *
116 - * @param {Object} [config.templatedURLs] If a URL is rendered based on some
117 - * server-side logic, its contents may depend on multiple files or on some other
118 - * unique string value. The keys in this object are server-rendered URLs. If the
119 - * values are an array of strings, they will be interpreted as `glob` patterns,
120 - * and the contents of any files matching the patterns will be used to uniquely
121 - * version the URL. If used with a single string, it will be interpreted as
122 - * unique versioning information that you've generated for a given URL.
123 - *
124 - * @return {Promise<{count: number, filePaths: Array<string>, size: number, warnings: Array<string>}>}
125 - * A promise that resolves once the service worker and related files
126 - * (indicated by `filePaths`) has been written to `swDest`. The `size` property
127 - * contains the aggregate size of all the precached entries, in bytes, and the
128 - * `count` property contains the total number of precached entries. Any
129 - * non-fatal warning messages will be returned via `warnings`.
130 - *
131 - * @memberof module:workbox-build
132 - */
133 -
134 -
135 -async function injectManifest(config) {
136 - const options = validate(config, injectManifestSchema); // Make sure we leave swSrc and swDest out of the precache manifest.
137 -
138 - for (const file of [options.swSrc, options.swDest]) {
139 - options.globIgnores.push(rebasePath({
140 - file,
141 - baseDirectory: options.globDirectory
142 - }));
143 - }
144 -
145 - const globalRegexp = new RegExp(escapeRegexp(options.injectionPoint), 'g');
146 - const {
147 - count,
148 - size,
149 - manifestEntries,
150 - warnings
151 - } = await getFileManifestEntries(options);
152 - let swFileContents;
153 -
154 - try {
155 - swFileContents = await fse.readFile(options.swSrc, 'utf8');
156 - } catch (error) {
157 - throw new Error(`${errors['invalid-sw-src']} ${error.message}`);
158 - }
159 -
160 - const injectionResults = swFileContents.match(globalRegexp);
161 -
162 - if (!injectionResults) {
163 - // See https://github.com/GoogleChrome/workbox/issues/2230
164 - if (upath.resolve(options.swSrc) === upath.resolve(options.swDest)) {
165 - throw new Error(errors['same-src-and-dest'] + ' ' + options.injectionPoint);
166 - }
167 -
168 - throw new Error(errors['injection-point-not-found'] + ' ' + options.injectionPoint);
169 - }
170 -
171 - assert(injectionResults.length === 1, errors['multiple-injection-points'] + options.injectionPoint);
172 - const manifestString = stringify(manifestEntries);
173 - const filesToWrite = {};
174 - const url = sourceMapURL.getFrom(swFileContents); // If our swSrc file contains a sourcemap, we would invalidate that
175 - // mapping if we just replaced injectionPoint with the stringified manifest.
176 - // Instead, we need to update the swDest contents as well as the sourcemap
177 - // at the same time.
178 - // See https://github.com/GoogleChrome/workbox/issues/2235
179 -
180 - if (url) {
181 - const sourcemapSrcPath = upath.resolve(upath.dirname(options.swSrc), url);
182 - const sourcemapDestPath = upath.resolve(upath.dirname(options.swDest), url);
183 - let originalMap;
184 -
185 - try {
186 - originalMap = await fse.readJSON(sourcemapSrcPath, 'utf8');
187 - } catch (error) {
188 - throw new Error(`${errors['cant-find-sourcemap']} ${error.message}`);
189 - }
190 -
191 - const {
192 - map,
193 - source
194 - } = await replaceAndUpdateSourceMap({
195 - originalMap,
196 - jsFilename: upath.basename(options.swDest),
197 - originalSource: swFileContents,
198 - replaceString: manifestString,
199 - searchString: options.injectionPoint
200 - });
201 - filesToWrite[options.swDest] = source;
202 - filesToWrite[sourcemapDestPath] = map;
203 - } else {
204 - // If there's no sourcemap associated with swSrc, a simple string
205 - // replacement will suffice.
206 - filesToWrite[options.swDest] = swFileContents.replace(globalRegexp, manifestString);
207 - }
208 -
209 - for (const [file, contents] of Object.entries(filesToWrite)) {
210 - try {
211 - await fse.mkdirp(upath.dirname(file));
212 - } catch (error) {
213 - throw new Error(errors['unable-to-make-injection-directory'] + ` '${error.message}'`);
214 - }
215 -
216 - await fse.writeFile(file, contents);
217 - }
218 -
219 - return {
220 - count,
221 - size,
222 - warnings,
223 - // Use upath.resolve() to make all the paths absolute.
224 - filePaths: Object.keys(filesToWrite).map(f => upath.resolve(f))
225 - };
226 -}
227 -
228 -module.exports = injectManifest;
\ No newline at end of file
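
A minimal sketch of calling `injectManifest()`; the paths are illustrative assumptions, and `swSrc` is assumed to contain the default `self.__WB_MANIFEST` injection point described above:

```js
const {injectManifest} = require('workbox-build');

injectManifest({
  globDirectory: 'dist/',
  globPatterns: ['**/*.{js,css,html}'],
  swSrc: 'src/sw.js',   // e.g. contains: precacheAndRoute(self.__WB_MANIFEST);
  swDest: 'dist/sw.js',
}).then(({count, size, filePaths, warnings}) => {
  warnings.forEach((warning) => console.warn(warning));
  console.log(`Injected ${count} entries (${size} bytes) into: ${filePaths.join(', ')}`);
});
```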
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const errors = require('./errors');
11 -
12 -module.exports = additionalManifestEntries => {
13 - return manifest => {
14 - const warnings = [];
15 - const stringEntries = new Set();
16 -
17 - for (const additionalEntry of additionalManifestEntries) {
18 - // Warn about either a string or an object that lacks a precache property.
19 - // (An object with a revision property set to null is okay.)
20 - if (typeof additionalEntry === 'string') {
21 - stringEntries.add(additionalEntry);
22 - } else if (additionalEntry && additionalEntry.revision === undefined) {
23 - stringEntries.add(additionalEntry.url);
24 - }
25 -
26 - manifest.push(additionalEntry);
27 - }
28 -
29 - if (stringEntries.size > 0) {
30 - let urls = '\n';
31 -
32 - for (const stringEntry of stringEntries) {
33 - urls += ` - ${stringEntry}\n`;
34 - }
35 -
36 - warnings.push(errors['string-entry-warning'] + urls);
37 - }
38 -
39 - return {
40 - manifest,
41 - warnings
42 - };
43 - };
44 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const {
11 - rollup
12 -} = require('rollup');
13 -
14 -const {
15 - terser
16 -} = require('rollup-plugin-terser');
17 -
18 -const {
19 - writeFile
20 -} = require('fs-extra');
21 -
22 -const babel = require('rollup-plugin-babel');
23 -
24 -const omt = require('@surma/rollup-plugin-off-main-thread');
25 -
26 -const upath = require('upath');
27 -
28 -const presetEnv = require('@babel/preset-env');
29 -
30 -const replace = require('@rollup/plugin-replace');
31 -
32 -const resolve = require('@rollup/plugin-node-resolve');
33 -
34 -const tempy = require('tempy');
35 -
36 -module.exports = async ({
37 - babelPresetEnvTargets,
38 - inlineWorkboxRuntime,
39 - mode,
40 - sourcemap,
41 - swDest,
42 - unbundledCode
43 -}) => {
44 - // We need to write this to the "real" file system, as Rollup won't read from
45 - // a custom file system.
46 - const {
47 - dir,
48 - base
49 - } = upath.parse(swDest);
50 - const temporaryFile = tempy.file({
51 - name: base
52 - });
53 - await writeFile(temporaryFile, unbundledCode);
54 - const plugins = [resolve(), replace({
55 - 'process.env.NODE_ENV': JSON.stringify(mode)
56 - }), babel({
57 - // Disable the logic that checks for local Babel config files:
58 - // https://github.com/GoogleChrome/workbox/issues/2111
59 - babelrc: false,
60 - configFile: false,
61 - presets: [[presetEnv, {
62 - targets: {
63 - browsers: babelPresetEnvTargets
64 - },
65 - loose: true
66 - }]]
67 - })];
68 -
69 - if (mode === 'production') {
70 - plugins.push(terser({
71 - mangle: {
72 - toplevel: true,
73 - properties: {
74 - regex: /(^_|_$)/
75 - }
76 - }
77 - }));
78 - }
79 -
80 - const rollupConfig = {
81 - plugins,
82 - input: temporaryFile
83 - }; // Rollup will inline the runtime by default. If we don't want that, we need
84 - // to add in some additional config.
85 -
86 - if (!inlineWorkboxRuntime) {
87 - rollupConfig.plugins.unshift(omt());
88 -
89 - rollupConfig.manualChunks = id => {
90 - return id.includes('workbox') ? 'workbox' : undefined;
91 - };
92 - }
93 -
94 - const bundle = await rollup(rollupConfig);
95 - const {
96 - output
97 - } = await bundle.generate({
98 - sourcemap,
99 - // Using an external Workbox runtime requires 'amd'.
100 - format: inlineWorkboxRuntime ? 'es' : 'amd'
101 - });
102 - const files = [];
103 -
104 - for (const chunkOrAsset of output) {
105 - if (chunkOrAsset.isAsset) {
106 - files.push({
107 - name: chunkOrAsset.fileName,
108 - contents: chunkOrAsset.source
109 - });
110 - } else {
111 - let code = chunkOrAsset.code;
112 -
113 - if (chunkOrAsset.map) {
114 - const sourceMapFile = chunkOrAsset.fileName + '.map';
115 - code += `//# sourceMappingURL=${sourceMapFile}\n`;
116 - files.push({
117 - name: sourceMapFile,
118 - contents: chunkOrAsset.map.toString()
119 - });
120 - }
121 -
122 - files.push({
123 - name: chunkOrAsset.fileName,
124 - contents: code
125 - });
126 - }
127 - } // Make sure that if there was a directory portion included in swDest, it's
128 - // prepended to all of the generated files.
129 -
130 -
131 - return files.map(file => {
132 - file.name = upath.format({
133 - dir,
134 - base: file.name
135 - });
136 - return file;
137 - });
138 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const assert = require('assert');
11 -
12 -const cdn = require('../cdn-details.json');
13 -
14 -const errors = require('./errors');
15 -
16 -const getCDNOrigin = () => {
17 - return `${cdn.origin}/${cdn.bucketName}/${cdn.releasesDir}`;
18 -};
19 -
20 -const getVersionedCDNURL = () => {
21 - return `${getCDNOrigin()}/${cdn.latestVersion}`;
22 -};
23 -
24 -const getModuleURL = (moduleName, buildType) => {
25 - assert(moduleName, errors['no-module-name']);
26 -
27 - if (buildType) {
28 - const pkgJson = require(`${moduleName}/package.json`);
29 -
30 - if (buildType === 'dev' && pkgJson.workbox.prodOnly) {
31 - // This is not due to a public-facing exception, so just throw an Error(),
32 - // without creating an entry in errors.js.
33 - throw Error(`The 'dev' build of ${moduleName} is not available.`);
34 - }
35 -
36 - return `${getVersionedCDNURL()}/${moduleName}.${buildType.slice(0, 4)}.js`;
37 - }
38 -
39 - return `${getVersionedCDNURL()}/${moduleName}.js`;
40 -};
41 -
42 -module.exports = {
43 - getCDNOrigin,
44 - getModuleURL
45 -};
\ No newline at end of file
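
A short sketch of the CDN helper exported from workbox-build; the URLs follow the cdn-details.json values shown above:

```js
const {getModuleURL} = require('workbox-build');

// Without a buildType, the plain bundle is returned, e.g.
// https://storage.googleapis.com/workbox-cdn/releases/5.1.4/workbox-sw.js
console.log(getModuleURL('workbox-sw'));

// With a buildType, the matching bundle is returned, e.g. workbox-core.prod.js.
console.log(getModuleURL('workbox-core', 'prod'));
```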
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const fse = require('fs-extra');
11 -
12 -const upath = require('upath');
13 -
14 -const errors = require('./errors'); // Used to filter the libraries to copy based on our package.json dependencies.
15 -
16 -
17 -const WORKBOX_PREFIX = 'workbox-'; // The directory within each package containing the final bundles.
18 -
19 -const BUILD_DIR = 'build';
20 -/**
21 - * This copies over a set of runtime libraries used by Workbox into a
22 - * local directory, which should be deployed alongside your service worker file.
23 - *
24 - * As an alternative to deploying these local copies, you could instead use
25 - * Workbox from its official CDN URL.
26 - *
27 - * This method is exposed for the benefit of developers using
28 - * [injectManifest()]{@link module:workbox-build.injectManifest} who would
29 - * prefer not to use the CDN copies of Workbox. Developers using
30 - * [generateSW()]{@link module:workbox-build.generateSW} don't need to
31 - * explicitly call this method.
32 - *
33 - * @param {string} destDirectory The path to the parent directory under which
34 - * the new directory of libraries will be created.
35 - * @return {Promise<string>} The name of the newly created directory.
36 - *
37 - * @alias module:workbox-build.copyWorkboxLibraries
38 - */
39 -
40 -module.exports = async destDirectory => {
41 - const thisPkg = require('../../package.json'); // Use the version string from workbox-build in the name of the parent
42 - // directory. This should be safe, because lerna will bump workbox-build's
43 - // pkg.version whenever one of the dependent libraries gets bumped, and we
44 - // care about versioning the dependent libraries.
45 -
46 -
47 - const workboxDirectoryName = `workbox-v${thisPkg.version}`;
48 - const workboxDirectoryPath = upath.join(destDirectory, workboxDirectoryName);
49 - await fse.ensureDir(workboxDirectoryPath);
50 - const copyPromises = [];
51 - const librariesToCopy = Object.keys(thisPkg.dependencies).filter(dependency => dependency.startsWith(WORKBOX_PREFIX));
52 -
53 - for (const library of librariesToCopy) {
54 - // Get the path to the package on the user's filesystem by require-ing
55 - // the package's `package.json` file via the node resolution algorithm.
56 - const libraryPath = upath.dirname(require.resolve(`${library}/package.json`));
57 - const buildPath = upath.join(libraryPath, BUILD_DIR); // fse.copy() copies all the files in a directory, not the directory itself.
58 - // See https://github.com/jprichardson/node-fs-extra/blob/master/docs/copy.md#copysrc-dest-options-callback
59 -
60 - copyPromises.push(fse.copy(buildPath, workboxDirectoryPath));
61 - }
62 -
63 - try {
64 - await Promise.all(copyPromises);
65 - return workboxDirectoryName;
66 - } catch (error) {
67 - throw Error(`${errors['unable-to-copy-workbox-libraries']} ${error}`);
68 - }
69 -};
\ No newline at end of file
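
A minimal sketch of `copyWorkboxLibraries()` for setups that serve the runtime locally instead of from the CDN; the destination directory is an illustrative assumption:

```js
const {copyWorkboxLibraries} = require('workbox-build');

copyWorkboxLibraries('dist/').then((workboxDirectoryName) => {
  // e.g. 'workbox-v5.1.4' -- a service worker could then load the runtime via
  // importScripts(`${workboxDirectoryName}/workbox-sw.js`).
  console.log(`Copied the Workbox runtime into dist/${workboxDirectoryName}`);
});
```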
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const ol = require('common-tags').oneLine;
11 -
12 -module.exports = {
13 - 'unable-to-get-rootdir': `Unable to get the root directory of your web app.`,
14 - 'no-extension': ol`Unable to detect a usable extension for a file in your web
15 - app directory.`,
16 - 'invalid-file-manifest-name': ol`The File Manifest Name must have at least one
17 - character.`,
18 - 'unable-to-get-file-manifest-name': 'Unable to get a file manifest name.',
19 - 'invalid-sw-dest': `The 'swDest' value must be a valid path.`,
20 - 'unable-to-get-sw-name': 'Unable to get a service worker file name.',
21 - 'unable-to-get-save-config': ol`An error occurred when asking to save details
22 - in a config file.`,
23 - 'unable-to-get-file-hash': ol`An error occurred when attempting to create a
24 - file hash.`,
25 - 'unable-to-get-file-size': ol`An error occurred when attempting to get a file
26 - size.`,
27 - 'unable-to-glob-files': 'An error occurred when globbing for files.',
28 - 'unable-to-make-manifest-directory': ol`Unable to make output directory for
29 - file manifest.`,
30 - 'read-manifest-template-failure': 'Unable to read template for file manifest',
31 - 'populating-manifest-tmpl-failed': ol`An error occurred when populating the
32 - file manifest template.`,
33 - 'manifest-file-write-failure': 'Unable to write the file manifest.',
34 - 'unable-to-make-sw-directory': ol`Unable to make the directories to output
35 - the service worker path.`,
36 - 'read-sw-template-failure': ol`Unable to read the service worker template
37 - file.`,
38 - 'sw-write-failure': 'Unable to write the service worker file.',
39 - 'sw-write-failure-directory': ol`Unable to write the service worker file;
40 - 'swDest' should be a full path to the file, not a path to a directory.`,
41 - 'unable-to-copy-workbox-libraries': ol`One or more of the Workbox libraries
42 - could not be copied over to the destination directory: `,
43 - 'invalid-generate-sw-input': ol`The input to generateSW() must be an object.`,
44 - 'invalid-glob-directory': ol`The supplied globDirectory must be a path as a
45 - string.`,
46 - 'invalid-dont-cache-bust': ol`The supplied 'dontCacheBustURLsMatching'
47 - parameter must be a RegExp.`,
48 - 'invalid-exclude-files': 'The excluded files should be an array of strings.',
49 - 'invalid-get-manifest-entries-input': ol`The input to
50 - 'getFileManifestEntries()' must be an object.`,
51 - 'invalid-manifest-path': ol`The supplied manifest path is not a string with
52 - at least one character.`,
53 - 'invalid-manifest-entries': ol`The manifest entries must be an array of
54 - strings or JavaScript objects containing a url parameter.`,
55 - 'invalid-manifest-format': ol`The value of the 'format' option passed to
56 - generateFileManifest() must be either 'iife' (the default) or 'es'.`,
57 - 'invalid-static-file-globs': ol`The 'globPatterns' value must be an array
58 - of strings.`,
59 - 'invalid-templated-urls': ol`The 'templatedURLs' value should be an object
60 - that maps URLs to either a string, or to an array of glob patterns.`,
61 - 'templated-url-matches-glob': ol`One of the 'templatedURLs' URLs is already
62 - being tracked via 'globPatterns': `,
63 - 'invalid-glob-ignores': ol`The 'globIgnores' parameter must be an array of
64 - glob pattern strings.`,
65 - 'manifest-entry-bad-url': ol`The generated manifest contains an entry without
66 - a URL string. This is likely an error with workbox-build.`,
67 - 'modify-url-prefix-bad-prefixes': ol`The 'modifyURLPrefix' parameter must be
68 - an object with string key value pairs.`,
69 - 'invalid-inject-manifest-arg': ol`The input to 'injectManifest()' must be an
70 - object.`,
71 - 'injection-point-not-found': ol`Unable to find a place to inject the manifest.
72 - Please ensure that your service worker file contains the following: `,
73 - 'multiple-injection-points': ol`Please ensure that your 'swSrc' file contains
74 - only one match for the following: `,
75 - 'populating-sw-tmpl-failed': ol`Unable to generate service worker from
76 - template.`,
77 - 'useless-glob-pattern': ol`One of the glob patterns doesn't match any files.
78 - Please remove or fix the following: `,
79 - 'bad-template-urls-asset': ol`There was an issue using one of the provided
80 - 'templatedURLs'.`,
81 - 'invalid-runtime-caching': ol`The 'runtimeCaching' parameter must be an
82 - array of objects with at least a 'urlPattern' and 'handler'.`,
83 - 'static-file-globs-deprecated': ol`'staticFileGlobs' is deprecated.
84 - Please use 'globPatterns' instead.`,
85 - 'dynamic-url-deprecated': ol`'dynamicURLToDependencies' is deprecated.
86 - Please use 'templatedURLs' instead.`,
87 - 'urlPattern-is-required': ol`The 'urlPattern' option is required when using
88 - 'runtimeCaching'.`,
89 - 'handler-is-required': ol`The 'handler' option is required when using
90 - runtimeCaching.`,
91 - 'invalid-generate-file-manifest-arg': ol`The input to generateFileManifest()
92 - must be an Object.`,
93 - 'invalid-sw-src': `The 'swSrc' file can't be read.`,
94 - 'same-src-and-dest': ol`Unable to find a place to inject the manifest. This is
95 - likely because swSrc and swDest are configured to the same file.
96 - Please ensure that your swSrc file contains the following:`,
97 - 'only-regexp-routes-supported': ol`Please use a regular expression object as
98 - the urlPattern parameter. (Express-style routes are not currently
99 - supported.)`,
100 - 'bad-runtime-caching-config': ol`An unknown configuration option was used
101 - with runtimeCaching: `,
102 - 'invalid-network-timeout-seconds': ol`When using networkTimeoutSeconds, you
103 - must set the handler to 'NetworkFirst'.`,
104 - 'no-module-name': ol`You must provide a moduleName parameter when calling
105 - getModuleURL().`,
106 - 'bad-manifest-transforms-return-value': ol`The return value from a
107 - manifestTransform should be an object with 'manifest' and optionally
108 - 'warnings' properties.`,
109 - 'string-entry-warning': ol`Some items were passed to additionalManifestEntries
110 - without revisioning info. This is generally NOT safe. Learn more at
111 - https://bit.ly/wb-precache.`,
112 - 'no-manifest-entries-or-runtime-caching': ol`Couldn't find configuration for
113 - either precaching or runtime caching. Please ensure that the various glob
114 - options are set to match one or more files, and/or configure the
115 - runtimeCaching option.`,
116 - 'cant-find-sourcemap': ol`The swSrc file refers to a sourcemap that can't be
117 - opened:`
118 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
11 -module.exports = str => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const crypto = require('crypto');
11 -
12 -module.exports = (compositeURL, dependencyDetails) => {
13 - let totalSize = 0;
14 - let compositeHash = '';
15 -
16 - for (const fileDetails of dependencyDetails) {
17 - totalSize += fileDetails.size;
18 - compositeHash += fileDetails.hash;
19 - }
20 -
21 - const md5 = crypto.createHash('md5');
22 - md5.update(compositeHash);
23 - const hashOfHashes = md5.digest('hex');
24 - return {
25 - file: compositeURL,
26 - hash: hashOfHashes,
27 - size: totalSize
28 - };
29 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const glob = require('glob');
11 -
12 -const upath = require('upath');
13 -
14 -const errors = require('./errors');
15 -
16 -const getFileSize = require('./get-file-size');
17 -
18 -const getFileHash = require('./get-file-hash');
19 -
20 -module.exports = ({
21 - globDirectory,
22 - globFollow,
23 - globIgnores,
24 - globPattern,
25 - globStrict
26 -}) => {
27 - let globbedFiles;
28 - let warning;
29 -
30 - try {
31 - globbedFiles = glob.sync(globPattern, {
32 - cwd: globDirectory,
33 - follow: globFollow,
34 - ignore: globIgnores,
35 - strict: globStrict
36 - });
37 - } catch (err) {
38 - throw new Error(errors['unable-to-glob-files'] + ` '${err.message}'`);
39 - }
40 -
41 - if (globbedFiles.length === 0) {
42 - warning = errors['useless-glob-pattern'] + ' ' + JSON.stringify({
43 - globDirectory,
44 - globPattern,
45 - globIgnores
46 - }, null, 2);
47 - }
48 -
49 - const fileDetails = globbedFiles.map(file => {
50 - const fullPath = upath.join(globDirectory, file);
51 - const fileSize = getFileSize(fullPath);
52 -
53 - if (fileSize === null) {
54 - return null;
55 - }
56 -
57 - const fileHash = getFileHash(fullPath);
58 - return {
59 - file: `${upath.relative(globDirectory, fullPath)}`,
60 - hash: fileHash,
61 - size: fileSize
62 - };
63 - }); // If !== null, means it's a valid file.
64 -
65 - const globbedFileDetails = fileDetails.filter(details => details !== null);
66 - return {
67 - globbedFileDetails,
68 - warning
69 - };
70 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const fs = require('fs');
11 -
12 -const getStringHash = require('./get-string-hash');
13 -
14 -const errors = require('./errors');
15 -
16 -module.exports = file => {
17 - try {
18 - const buffer = fs.readFileSync(file);
19 - return getStringHash(buffer);
20 - } catch (err) {
21 - throw new Error(errors['unable-to-get-file-hash'] + ` '${err.message}'`);
22 - }
23 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const assert = require('assert');
11 -
12 -const errors = require('./errors');
13 -
14 -const transformManifest = require('./transform-manifest');
15 -
16 -const getCompositeDetails = require('./get-composite-details');
17 -
18 -const getFileDetails = require('./get-file-details');
19 -
20 -const getStringDetails = require('./get-string-details');
21 -
22 -module.exports = async ({
23 - additionalManifestEntries,
24 - dontCacheBustURLsMatching,
25 - globDirectory,
26 - globFollow,
27 - globIgnores,
28 - globPatterns,
29 - globStrict,
30 - manifestTransforms,
31 - maximumFileSizeToCacheInBytes,
32 - modifyURLPrefix,
33 - swDest,
34 - templatedURLs
35 -}) => {
36 - const warnings = []; // Initialize to an empty array so that we can still pass something to
37 - // transformManifest() and get a normalized output.
38 -
39 - let fileDetails = [];
40 - const fileSet = new Set();
41 -
42 - if (globDirectory) {
43 - try {
44 - fileDetails = globPatterns.reduce((accumulated, globPattern) => {
45 - const {
46 - globbedFileDetails,
47 - warning
48 - } = getFileDetails({
49 - globDirectory,
50 - globFollow,
51 - globIgnores,
52 - globPattern,
53 - globStrict
54 - });
55 -
56 - if (warning) {
57 - warnings.push(warning);
58 - }
59 -
60 - globbedFileDetails.forEach(fileDetails => {
61 - if (fileSet.has(fileDetails.file)) {
62 - return;
63 - }
64 -
65 - fileSet.add(fileDetails.file);
66 - accumulated.push(fileDetails);
67 - });
68 - return accumulated;
69 - }, []);
70 - } catch (error) {
71 - // If there's an exception thrown while globbing, then report
72 - // it back as a warning, and don't consider it fatal.
73 - warnings.push(error.message);
74 - }
75 - }
76 -
77 - if (templatedURLs) {
78 - for (const url of Object.keys(templatedURLs)) {
79 - assert(!fileSet.has(url), errors['templated-url-matches-glob']);
80 - const dependencies = templatedURLs[url];
81 -
82 - if (Array.isArray(dependencies)) {
83 - const details = dependencies.reduce((previous, globPattern) => {
84 - try {
85 - const {
86 - globbedFileDetails,
87 - warning
88 - } = getFileDetails({
89 - globDirectory,
90 - globFollow,
91 - globIgnores,
92 - globPattern,
93 - globStrict
94 - });
95 -
96 - if (warning) {
97 - warnings.push(warning);
98 - }
99 -
100 - return previous.concat(globbedFileDetails);
101 - } catch (error) {
102 - const debugObj = {};
103 - debugObj[url] = dependencies;
104 - throw new Error(`${errors['bad-template-urls-asset']} ` + `'${globPattern}' from '${JSON.stringify(debugObj)}':\n` + error);
105 - }
106 - }, []);
107 - fileDetails.push(getCompositeDetails(url, details));
108 - } else if (typeof dependencies === 'string') {
109 - fileDetails.push(getStringDetails(url, dependencies));
110 - }
111 - }
112 - }
113 -
114 - const transformedManifest = await transformManifest({
115 - additionalManifestEntries,
116 - dontCacheBustURLsMatching,
117 - fileDetails,
118 - manifestTransforms,
119 - maximumFileSizeToCacheInBytes,
120 - modifyURLPrefix
121 - });
122 -
123 - if (warnings.length > 0) {
124 - transformedManifest.warnings.push(...warnings);
125 - }
126 -
127 - return transformedManifest;
128 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const fs = require('fs');
11 -
12 -const errors = require('./errors');
13 -
14 -module.exports = file => {
15 - try {
16 - const stat = fs.statSync(file);
17 -
18 - if (!stat.isFile()) {
19 - return null;
20 - }
21 -
22 - return stat.size;
23 - } catch (err) {
24 - throw new Error(errors['unable-to-get-file-size'] + ` '${err.message}'`);
25 - }
26 -};
\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const getStringHash = require('./get-string-hash');
11 -
12 -module.exports = (url, string) => {
13 - return {
14 - file: url,
15 - hash: getStringHash(string),
16 - size: string.length
17 - };
18 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const crypto = require('crypto');
11 -
12 -module.exports = string => {
13 - const md5 = crypto.createHash('md5');
14 - md5.update(string);
15 - return md5.digest('hex');
16 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const prettyBytes = require('pretty-bytes');
11 -
12 -module.exports = maximumFileSizeToCacheInBytes => {
13 - return originalManifest => {
14 - const warnings = [];
15 - const manifest = originalManifest.filter(entry => {
16 - if (entry.size <= maximumFileSizeToCacheInBytes) {
17 - return true;
18 - }
19 -
20 - warnings.push(`${entry.url} is ${prettyBytes(entry.size)}, and won't ` + `be precached. Configure maximumFileSizeToCacheInBytes to change ` + `this limit.`);
21 - return false;
22 - });
23 - return {
24 - manifest,
25 - warnings
26 - };
27 - };
28 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const errors = require('./errors');
11 -
12 -const escapeRegExp = require('./escape-regexp');
13 -
14 -module.exports = modifyURLPrefix => {
15 - if (!modifyURLPrefix || typeof modifyURLPrefix !== 'object' || Array.isArray(modifyURLPrefix)) {
16 - throw new Error(errors['modify-url-prefix-bad-prefixes']);
17 - } // If there are no entries in modifyURLPrefix, just return an identity
18 - // function as a shortcut.
19 -
20 -
21 - if (Object.keys(modifyURLPrefix).length === 0) {
22 - return entry => entry;
23 - }
24 -
25 - Object.keys(modifyURLPrefix).forEach(key => {
26 - if (typeof modifyURLPrefix[key] !== 'string') {
27 - throw new Error(errors['modify-url-prefix-bad-prefixes']);
28 - }
29 - }); // Escape the user input so it's safe to use in a regex.
30 -
31 - const safeModifyURLPrefixes = Object.keys(modifyURLPrefix).map(escapeRegExp); // Join all the `modifyURLPrefix` keys so a single regex can be used.
32 -
33 - const prefixMatchesStrings = safeModifyURLPrefixes.join('|'); // Add `^` to the front the prefix matches so it only matches the start of
34 - // a string.
35 -
36 - const modifyRegex = new RegExp(`^(${prefixMatchesStrings})`);
37 - return originalManifest => {
38 - const manifest = originalManifest.map(entry => {
39 - if (typeof entry.url !== 'string') {
40 - throw new Error(errors['manifest-entry-bad-url']);
41 - }
42 -
43 - entry.url = entry.url.replace(modifyRegex, match => {
44 - return modifyURLPrefix[match];
45 - });
46 - return entry;
47 - });
48 - return {
49 - manifest
50 - };
51 - };
52 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const ol = require('common-tags').oneLine;
11 -
12 -const upath = require('upath');
13 -/**
14 - * Class for keeping track of which Workbox modules are used by the generated
15 - * service worker script.
16 - *
17 - * @private
18 - */
19 -
20 -
21 -class ModuleRegistry {
22 - /**
23 - * @private
24 - */
25 - constructor() {
26 - this.modulesUsed = new Map();
27 - }
28 - /**
29 - * @return {Array<string>} A list of all of the import statements that are
30 - * needed for the modules being used.
31 - * @private
32 - */
33 -
34 -
35 - getImportStatements() {
36 - const workboxModuleImports = [];
37 -
38 - for (const [localName, {
39 - moduleName,
40 - pkg
41 - }] of this.modulesUsed) {
42 - // By default require.resolve returns the resolved path of the 'main'
43 - // field, which might be deeper than the package root. To work around
44 - // this, we can find the package's root by resolving its package.json and
45 - // strip the '/package.json' from the resolved path.
46 - const pkgJsonPath = require.resolve(`${pkg}/package.json`);
47 -
48 - const pkgRoot = upath.dirname(pkgJsonPath);
49 - const importStatement = ol`import {${moduleName} as ${localName}} from
50 - '${pkgRoot}/${moduleName}.mjs';`;
51 - workboxModuleImports.push(importStatement);
52 - }
53 -
54 - return workboxModuleImports;
55 - }
56 - /**
57 - * @param {string} pkg The workbox package that the module belongs to.
58 - * @param {string} moduleName The name of the module to import.
59 - * @return {string} The local variable name that corresponds to that module.
60 - * @private
61 - */
62 -
63 -
64 - getLocalName(pkg, moduleName) {
65 - return `${pkg.replace(/-/g, '_')}_${moduleName}`;
66 - }
67 - /**
68 - * @param {string} pkg The workbox package that the module belongs to.
69 - * @param {string} moduleName The name of the module to import.
70 - * @return {string} The local variable name that corresponds to that module.
71 - * @private
72 - */
73 -
74 -
75 - use(pkg, moduleName) {
76 - const localName = this.getLocalName(pkg, moduleName);
77 - this.modulesUsed.set(localName, {
78 - moduleName,
79 - pkg
80 - });
81 - return localName;
82 - }
83 -
84 -}
85 -
86 -module.exports = ModuleRegistry;
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const errors = require('./errors');
11 -
12 -module.exports = regexp => {
13 - if (!(regexp instanceof RegExp)) {
14 - throw new Error(errors['invalid-dont-cache-bust']);
15 - }
16 -
17 - return originalManifest => {
18 - const manifest = originalManifest.map(entry => {
19 - if (typeof entry.url !== 'string') {
20 - throw new Error(errors['manifest-entry-bad-url']);
21 - }
22 -
23 - if (entry.url.match(regexp)) {
24 - entry.revision = null;
25 - }
26 -
27 - return entry;
28 - });
29 - return {
30 - manifest
31 - };
32 - };
33 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const template = require('lodash.template');
11 -
12 -const swTemplate = require('../templates/sw-template');
13 -
14 -const errors = require('./errors');
15 -
16 -const ModuleRegistry = require('./module-registry');
17 -
18 -const runtimeCachingConverter = require('./runtime-caching-converter');
19 -
20 -const stringifyWithoutComments = require('./stringify-without-comments');
21 -
22 -module.exports = ({
23 - cacheId,
24 - cleanupOutdatedCaches,
25 - clientsClaim,
26 - directoryIndex,
27 - disableDevLogs,
28 - ignoreURLParametersMatching,
29 - importScripts,
30 - manifestEntries = [],
31 - navigateFallback,
32 - navigateFallbackDenylist,
33 - navigateFallbackAllowlist,
34 - navigationPreload,
35 - offlineGoogleAnalytics,
36 - runtimeCaching = [],
37 - skipWaiting
38 -}) => {
39 - // There needs to be at least something to precache, or else runtime caching.
40 - if (!(manifestEntries.length > 0 || runtimeCaching.length > 0)) {
41 - throw new Error(errors['no-manifest-entries-or-runtime-caching']);
42 - } // These are all options that can be passed to the precacheAndRoute() method.
43 -
44 -
45 - const precacheOptions = {
46 - directoryIndex,
47 - // An array of RegExp objects can't be serialized by JSON.stringify()'s
48 - // default behavior, so if it's given, convert it manually.
49 - ignoreURLParametersMatching: ignoreURLParametersMatching ? [] : undefined
50 - };
51 - let precacheOptionsString = JSON.stringify(precacheOptions, null, 2);
52 -
53 - if (ignoreURLParametersMatching) {
54 - precacheOptionsString = precacheOptionsString.replace(`"ignoreURLParametersMatching": []`, `"ignoreURLParametersMatching": [` + `${ignoreURLParametersMatching.join(', ')}]`);
55 - }
56 -
57 - let offlineAnalyticsConfigString;
58 -
59 - if (offlineGoogleAnalytics) {
60 - // If offlineGoogleAnalytics is a truthy value, we need to convert it to the
61 - // format expected by the template.
62 - offlineAnalyticsConfigString = offlineGoogleAnalytics === true ? // If it's the literal value true, then use an empty config string.
63 - '{}' : // Otherwise, convert the config object into a more complex string, taking
64 - // into account the fact that functions might need to be stringified.
65 - stringifyWithoutComments(offlineGoogleAnalytics);
66 - }
67 -
68 - const moduleRegistry = new ModuleRegistry();
69 -
70 - try {
71 - const populatedTemplate = template(swTemplate)({
72 - cacheId,
73 - cleanupOutdatedCaches,
74 - clientsClaim,
75 - disableDevLogs,
76 - importScripts,
77 - manifestEntries,
78 - navigateFallback,
79 - navigateFallbackDenylist,
80 - navigateFallbackAllowlist,
81 - navigationPreload,
82 - offlineAnalyticsConfigString,
83 - precacheOptionsString,
84 - runtimeCaching: runtimeCachingConverter(moduleRegistry, runtimeCaching),
85 - skipWaiting,
86 - use: moduleRegistry.use.bind(moduleRegistry)
87 - });
88 - const workboxImportStatements = moduleRegistry.getImportStatements(); // We need the import statements for all of the Workbox runtime modules
89 - // prepended, so that the correct bundle can be created.
90 -
91 - return workboxImportStatements.join('\n') + populatedTemplate;
92 - } catch (error) {
93 - throw new Error(`${errors['populating-sw-tmpl-failed']} '${error.message}'`);
94 - }
95 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const upath = require('upath');
11 -
12 -module.exports = ({
13 - baseDirectory,
14 - file
15 -}) => {
16 - // The initial path is relative to the current directory, so make it absolute.
17 - const absolutePath = upath.resolve(file); // Convert the absolute path so that it's relative to the baseDirectory.
18 -
19 - const relativePath = upath.relative(baseDirectory, absolutePath); // Remove any leading ./ as it won't work in a glob pattern.
20 -
21 - const normalizedPath = upath.normalize(relativePath);
22 - return normalizedPath;
23 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const {
11 - SourceMapConsumer,
12 - SourceMapGenerator
13 -} = require('source-map');
14 -/**
15 - * Adapted from https://github.com/nsams/sourcemap-aware-replace, with modern
16 - * JavaScript updates, along with additional properties copied from originalMap.
17 - *
18 - * @param {Object} options
19 - * @param {string} options.jsFilename The name for the file whose contents
20 - * correspond to originalSource.
21 - * @param {Object} options.originalMap The sourcemap for originalSource,
22 - * prior to any replacements.
23 - * @param {string} options.originalSource The source code, prior to any
24 - * replacements.
25 - * @param {string} options.replaceString A string to swap in for searchString.
26 - * @param {string} options.searchString A string in originalSource to replace.
27 - * Only the first occurrence will be replaced.
28 - * @return {{source: string, map: string}} An object containing both
29 - * originalSource with the replacement applied, and the modified originalMap.
30 - *
31 - * @private
32 - */
33 -
34 -
35 -async function replaceAndUpdateSourceMap({
36 - jsFilename,
37 - originalMap,
38 - originalSource,
39 - replaceString,
40 - searchString
41 -}) {
42 - const generator = new SourceMapGenerator({
43 - file: jsFilename
44 - });
45 - const consumer = await new SourceMapConsumer(originalMap);
46 - let pos;
47 - let src = originalSource;
48 - const replacements = [];
49 - let lineNum = 0;
50 - let filePos = 0;
51 - const lines = src.split('\n');
52 -
53 - for (let line of lines) {
54 - lineNum++;
55 - let searchPos = 0;
56 -
57 - while ((pos = line.indexOf(searchString, searchPos)) !== -1) {
58 - src = src.substring(0, filePos + pos) + replaceString + src.substring(filePos + pos + searchString.length);
59 - line = line.substring(0, pos) + replaceString + line.substring(pos + searchString.length);
60 - replacements.push({
61 - line: lineNum,
62 - column: pos
63 - });
64 - searchPos = pos + replaceString.length;
65 - }
66 -
67 - filePos += line.length + 1;
68 - }
69 -
70 - replacements.reverse();
71 - consumer.eachMapping(mapping => {
72 - for (const replacement of replacements) {
73 - if (replacement.line == mapping.generatedLine && mapping.generatedColumn > replacement.column) {
74 - const offset = searchString.length - replaceString.length;
75 - mapping.generatedColumn -= offset;
76 - }
77 - }
78 -
79 - if (mapping.source) {
80 - const newMapping = {
81 - generated: {
82 - line: mapping.generatedLine,
83 - column: mapping.generatedColumn
84 - },
85 - original: {
86 - line: mapping.originalLine,
87 - column: mapping.originalColumn
88 - },
89 - source: mapping.source
90 - };
91 - return generator.addMapping(newMapping);
92 - }
93 -
94 - return mapping;
95 - });
96 - consumer.destroy();
97 - const updatedSourceMap = Object.assign(JSON.parse(generator.toString()), {
98 - names: originalMap.names,
99 - sourceRoot: originalMap.sourceRoot,
100 - sources: originalMap.sources,
101 - sourcesContent: originalMap.sourcesContent
102 - });
103 - return {
104 - map: JSON.stringify(updatedSourceMap),
105 - source: src
106 - };
107 -}
108 -
109 -module.exports = replaceAndUpdateSourceMap;
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const ol = require('common-tags').oneLine;
11 -
12 -const errors = require('./errors');
13 -
14 -const stringifyWithoutComments = require('./stringify-without-comments');
15 -/**
16 - * Given a set of options that configures runtime caching behavior, convert it
17 - * to the equivalent Workbox method calls.
18 - *
19 - * @param {ModuleRegistry} moduleRegistry
20 - * @param {Object} options See
21 - * https://developers.google.com/web/tools/workbox/modules/workbox-build#generateSW-runtimeCaching
22 - * @return {string} A JSON string representing the equivalent options.
23 - *
24 - * @private
25 - */
26 -
27 -
28 -function getOptionsString(moduleRegistry, options = {}) {
29 - let plugins = [];
30 -
31 - if (options.plugins) {
32 - // Using libs because JSON.stringify won't handle functions.
33 - plugins = options.plugins.map(stringifyWithoutComments);
34 - delete options.plugins;
35 - } // Pull handler-specific config from the options object, since these values are
36 - // not directly used to construct a plugin instance. If set, they need to be
37 - // passed as options to the handler constructor instead.
38 -
39 -
40 - const handlerOptionKeys = ['cacheName', 'networkTimeoutSeconds', 'fetchOptions', 'matchOptions'];
41 - const handlerOptions = {};
42 -
43 - for (const key of handlerOptionKeys) {
44 - if (key in options) {
45 - handlerOptions[key] = options[key];
46 - delete options[key];
47 - }
48 - }
49 -
50 - for (const [pluginName, pluginConfig] of Object.entries(options)) {
51 - // Ensure that we have some valid configuration to pass to the plugin.
52 - if (Object.keys(pluginConfig).length === 0) {
53 - continue;
54 - }
55 -
56 - let pluginCode;
57 -
58 - switch (pluginName) {
59 - case 'backgroundSync':
60 - {
61 - const name = pluginConfig.name;
62 - const plugin = moduleRegistry.use('workbox-background-sync', 'BackgroundSyncPlugin');
63 - pluginCode = `new ${plugin}(${JSON.stringify(name)}`;
64 -
65 - if ('options' in pluginConfig) {
66 - pluginCode += `, ${stringifyWithoutComments(pluginConfig.options)}`;
67 - }
68 -
69 - pluginCode += `)`;
70 - break;
71 - }
72 -
73 - case 'broadcastUpdate':
74 - {
75 - const channelName = pluginConfig.channelName;
76 - const opts = Object.assign({
77 - channelName
78 - }, pluginConfig.options);
79 - const plugin = moduleRegistry.use('workbox-broadcast-update', 'BroadcastUpdatePlugin');
80 - pluginCode = `new ${plugin}(${stringifyWithoutComments(opts)})`;
81 - break;
82 - }
83 -
84 - case 'cacheableResponse':
85 - {
86 - const plugin = moduleRegistry.use('workbox-cacheable-response', 'CacheableResponsePlugin');
87 - pluginCode = `new ${plugin}(${stringifyWithoutComments(pluginConfig)})`;
88 - break;
89 - }
90 -
91 - case 'expiration':
92 - {
93 - const plugin = moduleRegistry.use('workbox-expiration', 'ExpirationPlugin');
94 - pluginCode = `new ${plugin}(${stringifyWithoutComments(pluginConfig)})`;
95 - break;
96 - }
97 -
98 - default:
99 - {
100 - throw new Error(errors['bad-runtime-caching-config'] + pluginName);
101 - }
102 - }
103 -
104 - plugins.push(pluginCode);
105 - }
106 -
107 - if (Object.keys(handlerOptions).length > 0 || plugins.length > 0) {
108 - const optionsString = JSON.stringify(handlerOptions).slice(1, -1);
109 - return ol`{
110 - ${optionsString ? optionsString + ',' : ''}
111 - plugins: [${plugins.join(', ')}]
112 - }`;
113 - } else {
114 - return '';
115 - }
116 -}
117 -
118 -module.exports = (moduleRegistry, runtimeCaching) => {
119 - return runtimeCaching.map(entry => {
120 - const method = entry.method || 'GET';
121 -
122 - if (!entry.urlPattern) {
123 - throw new Error(errors['urlPattern-is-required']);
124 - }
125 -
126 - if (!entry.handler) {
127 - throw new Error(errors['handler-is-required']);
128 - } // This validation logic is a bit too gnarly for joi, so it's manually
129 - // implemented here.
130 -
131 -
132 - if (entry.options && entry.options.networkTimeoutSeconds && entry.handler !== 'NetworkFirst') {
133 - throw new Error(errors['invalid-network-timeout-seconds']);
134 - } // urlPattern might be a string, a RegExp object, or a function.
135 - // If it's a string, it needs to be quoted.
136 -
137 -
138 - const matcher = typeof entry.urlPattern === 'string' ? JSON.stringify(entry.urlPattern) : entry.urlPattern;
139 - const registerRoute = moduleRegistry.use('workbox-routing', 'registerRoute');
140 -
141 - if (typeof entry.handler === 'string') {
142 - const optionsString = getOptionsString(moduleRegistry, entry.options);
143 - const handler = moduleRegistry.use('workbox-strategies', entry.handler);
144 - const strategyString = `new ${handler}(${optionsString})`;
145 - return `${registerRoute}(${matcher}, ${strategyString}, '${method}');\n`;
146 - } else if (typeof entry.handler === 'function') {
147 - return `${registerRoute}(${matcher}, ${entry.handler}, '${method}');\n`;
148 - }
149 - }).filter(entry => Boolean(entry)); // Remove undefined map() return values.
150 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const objectStringify = require('stringify-object');
11 -
12 -const stripComments = require('strip-comments');
13 -
14 -module.exports = obj => {
15 - return objectStringify(obj, {
16 - transform: (_obj, _prop, str) => typeof _obj[_prop] === 'function' ? stripComments(str) : str
17 - });
18 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const errors = require('./errors');
11 -
12 -const additionalManifestEntriesTransform = require('./additional-manifest-entries-transform');
13 -
14 -const maximumSizeTransform = require('./maximum-size-transform');
15 -
16 -const modifyURLPrefixTransform = require('./modify-url-prefix-transform');
17 -
18 -const noRevisionForURLsMatchingTransform = require('./no-revision-for-urls-matching-transform');
19 -/**
20 - * A `ManifestTransform` function can be used to modify the `url` or
21 - * `revision` properties of some or all of the
22 - * {@link module:workbox-build.ManifestEntry|ManifestEntries} in the manifest.
23 - *
24 - * Deleting the `revision` property of an entry will cause
25 - * the corresponding `url` to be precached without cache-busting parameters
26 - * applied, which is to say, it implies that the URL itself contains
27 - * proper versioning info. If the `revision` property is present, it must be
28 - * set to a string.
29 - *
30 - * @example A transformation that prepended the origin of a CDN for any
31 - * URL starting with '/assets/' could be implemented as:
32 - *
33 - * const cdnTransform = async (manifestEntries) => {
34 - * const manifest = manifestEntries.map(entry => {
35 - * const cdnOrigin = 'https://example.com';
36 - * if (entry.url.startsWith('/assets/')) {
37 - * entry.url = cdnOrigin + entry.url;
38 - * }
39 - * return entry;
40 - * });
41 - * return {manifest, warnings: []};
42 - * };
43 - *
44 - * @example A transformation that nulls the revision field when the
45 - * URL contains an 8-character hash surrounded by '.', indicating that it
46 - * already contains revision information:
47 - *
48 - * const removeRevisionTransform = async (manifestEntries) => {
49 - * const manifest = manifestEntries.map(entry => {
50 - * const hashRegExp = /\.\w{8}\./;
51 - * if (entry.url.match(hashRegExp)) {
52 - * entry.revision = null;
53 - * }
54 - * return entry;
55 - * });
56 - * return {manifest, warnings: []};
57 - * };
58 - *
59 - * @callback ManifestTransform
60 - * @param {Array<module:workbox-build.ManifestEntry>} manifestEntries The full
61 - * array of entries, prior to the current transformation.
62 - * @param {Object} [compilation] When used in the webpack plugins, this param
63 - * will be set to the current `compilation`.
64 - * @return {Promise<module:workbox-build.ManifestTransformResult>}
65 - * The array of entries with the transformation applied, and optionally, any
66 - * warnings that should be reported back to the build tool.
67 - *
68 - * @memberof module:workbox-build
69 - */
70 -
71 -
72 -module.exports = async ({
73 - additionalManifestEntries,
74 - dontCacheBustURLsMatching,
75 - fileDetails,
76 - manifestTransforms,
77 - maximumFileSizeToCacheInBytes,
78 - modifyURLPrefix,
79 - transformParam
80 -}) => {
81 - let allWarnings = []; // Take the array of fileDetail objects and convert it into an array of
82 - // {url, revision, size} objects, with \ replaced with /.
83 -
84 - const normalizedManifest = fileDetails.map(fileDetails => {
85 - return {
86 - url: fileDetails.file.replace(/\\/g, '/'),
87 - revision: fileDetails.hash,
88 - size: fileDetails.size
89 - };
90 - });
91 - const transformsToApply = [];
92 -
93 - if (maximumFileSizeToCacheInBytes) {
94 - transformsToApply.push(maximumSizeTransform(maximumFileSizeToCacheInBytes));
95 - }
96 -
97 - if (modifyURLPrefix) {
98 - transformsToApply.push(modifyURLPrefixTransform(modifyURLPrefix));
99 - }
100 -
101 - if (dontCacheBustURLsMatching) {
102 - transformsToApply.push(noRevisionForURLsMatchingTransform(dontCacheBustURLsMatching));
103 - } // Run any manifestTransforms functions second-to-last.
104 -
105 -
106 - if (manifestTransforms) {
107 - transformsToApply.push(...manifestTransforms);
108 - } // Run additionalManifestEntriesTransform last.
109 -
110 -
111 - if (additionalManifestEntries) {
112 - transformsToApply.push(additionalManifestEntriesTransform(additionalManifestEntries));
113 - }
114 -
115 - let transformedManifest = normalizedManifest;
116 -
117 - for (const transform of transformsToApply) {
118 - const result = await transform(transformedManifest, transformParam);
119 -
120 - if (!('manifest' in result)) {
121 - throw new Error(errors['bad-manifest-transforms-return-value']);
122 - }
123 -
124 - transformedManifest = result.manifest;
125 - allWarnings = allWarnings.concat(result.warnings || []);
126 - } // Generate some metadata about the manifest before we clear out the size
127 - // properties from each entry.
128 -
129 -
130 - const count = transformedManifest.length;
131 - let size = 0;
132 -
133 - for (const manifestEntry of transformedManifest) {
134 - size += manifestEntry.size || 0;
135 - delete manifestEntry.size;
136 - }
137 -
138 - return {
139 - count,
140 - size,
141 - manifestEntries: transformedManifest,
142 - warnings: allWarnings
143 - };
144 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -module.exports = (options, schema) => {
11 - const {
12 - value,
13 - error
14 - } = schema.validate(options, {
15 - language: {
16 - object: {
17 - allowUnknown: 'is not a supported parameter.'
18 - }
19 - }
20 - });
21 -
22 - if (error) {
23 - throw error;
24 - }
25 -
26 - return value;
27 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const fse = require('fs-extra');
11 -
12 -const upath = require('upath');
13 -
14 -const bundle = require('./bundle');
15 -
16 -const errors = require('./errors');
17 -
18 -const populateSWTemplate = require('./populate-sw-template');
19 -
20 -module.exports = async ({
21 - babelPresetEnvTargets,
22 - cacheId,
23 - cleanupOutdatedCaches,
24 - clientsClaim,
25 - directoryIndex,
26 - disableDevLogs,
27 - ignoreURLParametersMatching,
28 - importScripts,
29 - inlineWorkboxRuntime,
30 - manifestEntries,
31 - mode,
32 - navigateFallback,
33 - navigateFallbackDenylist,
34 - navigateFallbackAllowlist,
35 - navigationPreload,
36 - offlineGoogleAnalytics,
37 - runtimeCaching,
38 - skipWaiting,
39 - sourcemap,
40 - swDest
41 -}) => {
42 - const outputDir = upath.dirname(swDest);
43 -
44 - try {
45 - await fse.mkdirp(outputDir);
46 - } catch (error) {
47 - throw new Error(`${errors['unable-to-make-sw-directory']}. ` + `'${error.message}'`);
48 - }
49 -
50 - const unbundledCode = populateSWTemplate({
51 - cacheId,
52 - cleanupOutdatedCaches,
53 - clientsClaim,
54 - directoryIndex,
55 - disableDevLogs,
56 - ignoreURLParametersMatching,
57 - importScripts,
58 - manifestEntries,
59 - navigateFallback,
60 - navigateFallbackDenylist,
61 - navigateFallbackAllowlist,
62 - navigationPreload,
63 - offlineGoogleAnalytics,
64 - runtimeCaching,
65 - skipWaiting
66 - });
67 -
68 - try {
69 - const files = await bundle({
70 - babelPresetEnvTargets,
71 - inlineWorkboxRuntime,
72 - mode,
73 - sourcemap,
74 - swDest,
75 - unbundledCode
76 - });
77 - const filePaths = [];
78 -
79 - for (const file of files) {
80 - const filePath = upath.resolve(file.name);
81 - filePaths.push(filePath);
82 - await fse.writeFile(filePath, file.contents);
83 - }
84 -
85 - return filePaths;
86 - } catch (error) {
87 - if (error.code === 'EISDIR') {
88 - // See https://github.com/GoogleChrome/workbox/issues/612
89 - throw new Error(errors['sw-write-failure-directory']);
90 - }
91 -
92 - throw new Error(`${errors['sw-write-failure']} '${error.message}'`);
93 - }
94 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -module.exports = {
11 - babelPresetEnvTargets: ['chrome >= 56'],
12 - cleanupOutdatedCaches: false,
13 - clientsClaim: false,
14 - compileSrc: true,
15 - disableDevLogs: false,
16 - exclude: [/\.map$/, /^manifest.*\.js$/],
17 - globFollow: true,
18 - globIgnores: ['**/node_modules/**/*'],
19 - globPatterns: ['**/*.{js,css,html}'],
20 - globStrict: true,
21 - injectionPoint: 'self.__WB_MANIFEST',
22 - inlineWorkboxRuntime: false,
23 - maximumFileSizeToCacheInBytes: 2 * 1024 * 1024,
24 - mode: 'production',
25 - navigateFallback: undefined,
26 - navigationPreload: false,
27 - offlineGoogleAnalytics: false,
28 - purgeOnQuotaError: true,
29 - skipWaiting: false,
30 - sourcemap: true,
31 - swDestFilename: 'service-worker.js'
32 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -module.exports = joi.object().keys({
13 - revision: joi.string().required().allow(null),
14 - url: joi.string().required()
15 -});
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -module.exports = joi.object().type(RegExp).error(() => 'the value must be a RegExp');
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const defaults = require('../defaults');
13 -
14 -const manifestEntryObject = require('../objects/manifest-entry');
15 -
16 -const regExpObject = require('../objects/reg-exp');
17 -
18 -module.exports = {
19 - additionalManifestEntries: joi.array().items(joi.string(), manifestEntryObject),
20 - dontCacheBustURLsMatching: regExpObject,
21 - manifestTransforms: joi.array().items(joi.func().minArity(1).maxArity(2)),
22 - maximumFileSizeToCacheInBytes: joi.number().min(1).default(defaults.maximumFileSizeToCacheInBytes),
23 - mode: joi.string().default(process.env.NODE_ENV || defaults.mode),
24 - modifyURLPrefix: joi.object()
25 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const defaults = require('../defaults');
13 -
14 -const regExpObject = require('../objects/reg-exp');
15 -
16 -module.exports = {
17 - babelPresetEnvTargets: joi.array().items(joi.string()).default(defaults.babelPresetEnvTargets),
18 - cacheId: joi.string(),
19 - cleanupOutdatedCaches: joi.boolean().default(defaults.cleanupOutdatedCaches),
20 - clientsClaim: joi.boolean().default(defaults.clientsClaim),
21 - directoryIndex: joi.string(),
22 - disableDevLogs: joi.boolean().default(defaults.disableDevLogs),
23 - ignoreURLParametersMatching: joi.array().items(regExpObject),
24 - importScripts: joi.array().items(joi.string()),
25 - inlineWorkboxRuntime: joi.boolean().default(defaults.inlineWorkboxRuntime),
26 - navigateFallback: joi.string().default(defaults.navigateFallback),
27 - navigateFallbackAllowlist: joi.array().items(regExpObject),
28 - navigateFallbackBlacklist: joi.forbidden().error(new Error('navigateFallbackBlacklist has been renamed navigateFallbackDenylist.')),
29 - navigateFallbackDenylist: joi.array().items(regExpObject),
30 - navigateFallbackWhitelist: joi.forbidden().error(new Error('navigateFallbackWhitelist has been renamed navigateFallbackAllowlist.')),
31 - navigationPreload: joi.boolean().default(defaults.navigationPreload),
32 - offlineGoogleAnalytics: joi.alternatives().try(joi.boolean(), joi.object()).default(defaults.offlineGoogleAnalytics),
33 - runtimeCaching: joi.array().items(joi.object().keys({
34 - method: joi.string().valid('DELETE', 'GET', 'HEAD', 'PATCH', 'POST', 'PUT'),
35 - urlPattern: [regExpObject, joi.string(), joi.func()],
36 - handler: [joi.func(), joi.string().valid('CacheFirst', 'CacheOnly', 'NetworkFirst', 'NetworkOnly', 'StaleWhileRevalidate')],
37 - options: joi.object().keys({
38 - backgroundSync: joi.object().keys({
39 - name: joi.string().required(),
40 - options: joi.object()
41 - }),
42 - broadcastUpdate: joi.object().keys({
43 - channelName: joi.string().required(),
44 - options: joi.object()
45 - }),
46 - cacheableResponse: joi.object().keys({
47 - statuses: joi.array().items(joi.number().min(0).max(599)),
48 - headers: joi.object()
49 - }).or('statuses', 'headers'),
50 - cacheName: joi.string(),
51 - expiration: joi.object().keys({
52 - maxEntries: joi.number().min(1),
53 - maxAgeSeconds: joi.number().min(1),
54 - purgeOnQuotaError: joi.boolean().default(defaults.purgeOnQuotaError)
55 - }).or('maxEntries', 'maxAgeSeconds'),
56 - networkTimeoutSeconds: joi.number().min(1),
57 - plugins: joi.array().items(joi.object()),
58 - fetchOptions: joi.object(),
59 - matchOptions: joi.object()
60 - }).with('expiration', 'cacheName')
61 - }).requiredKeys('urlPattern', 'handler')).when('navigationPreload', {
62 - is: true,
63 - then: joi.required()
64 - }),
65 - skipWaiting: joi.boolean().default(defaults.skipWaiting),
66 - sourcemap: joi.boolean().default(defaults.sourcemap)
67 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const defaults = require('../defaults');
13 -
14 -module.exports = {
15 - globDirectory: joi.string(),
16 - globFollow: joi.boolean().default(defaults.globFollow),
17 - globIgnores: joi.array().items(joi.string()).default(defaults.globIgnores),
18 - globPatterns: joi.array().items(joi.string()).default(defaults.globPatterns),
19 - globStrict: joi.boolean().default(defaults.globStrict),
20 - // templatedURLs is an object where any property name is valid, and the values
21 - // can be either a string or an array of strings.
22 - templatedURLs: joi.object().pattern(/./, [joi.string(), joi.array().items(joi.string())])
23 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const defaults = require('../defaults');
13 -
14 -module.exports = {
15 - injectionPoint: joi.string().default(defaults.injectionPoint),
16 - swSrc: joi.string().required()
17 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const defaults = require('../defaults');
13 -
14 -const regExpObject = require('../objects/reg-exp');
15 -
16 -module.exports = {
17 - chunks: joi.array().items(joi.string()),
18 - exclude: joi.array().items(joi.string(), regExpObject, joi.func().arity(1)).default(defaults.exclude),
19 - excludeChunks: joi.array().items(joi.string()),
20 - include: joi.array().items(joi.string(), regExpObject, joi.func().arity(1))
21 -};
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const basePartial = require('../partials/base');
13 -
14 -const generatePartial = require('../partials/generate');
15 -
16 -const globPartial = require('../partials/glob');
17 -
18 -const supportedOptions = Object.assign({
19 - swDest: joi.string().required().regex(/\.js$/)
20 -}, basePartial, generatePartial, globPartial);
21 -module.exports = joi.object().keys(supportedOptions);
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const basePartial = require('../partials/base');
13 -
14 -const globPartial = require('../partials/glob');
15 -
16 -const supportedOptions = Object.assign({}, basePartial, globPartial);
17 -module.exports = joi.object().keys(supportedOptions);
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const basePartial = require('../partials/base');
13 -
14 -const globPartial = require('../partials/glob');
15 -
16 -const injectPartial = require('../partials/inject');
17 -
18 -const supportedOptions = Object.assign({
19 - swDest: joi.string().required().regex(/\.js$/)
20 -}, basePartial, globPartial, injectPartial);
21 -module.exports = joi.object().keys(supportedOptions);
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const basePartial = require('../partials/base');
13 -
14 -const defaults = require('../defaults');
15 -
16 -const generatePartial = require('../partials/generate');
17 -
18 -const webpackPartial = require('../partials/webpack');
19 -
20 -const supportedOptions = Object.assign({
21 - importScriptsViaChunks: joi.array().items(joi.string()),
22 - swDest: joi.string().default(defaults.swDestFilename)
23 -}, basePartial, generatePartial, webpackPartial);
24 -module.exports = joi.object().keys(supportedOptions);
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2019 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -const joi = require('@hapi/joi');
11 -
12 -const upath = require('upath');
13 -
14 -const basePartial = require('../partials/base');
15 -
16 -const defaults = require('../defaults');
17 -
18 -const injectPartial = require('../partials/inject');
19 -
20 -const webpackPartial = require('../partials/webpack'); // See https://github.com/hapijs/joi/blob/v16.0.0-rc2/API.md#anydefaultvalue-description
21 -
22 -
23 -const swSrcBasename = context => {
24 - const {
25 - name
26 - } = upath.parse(context.swSrc); // Always use the .js extension when generating a default filename.
27 -
28 - return name + '.js';
29 -};
30 -
31 -swSrcBasename.description = 'derived from the swSrc file name';
32 -const supportedOptions = Object.assign({
33 - compileSrc: joi.boolean().default(defaults.compileSrc),
34 - webpackCompilationPlugins: joi.array().items(joi.object()).when('compileSrc', {
35 - is: false,
36 - then: joi.forbidden()
37 - })
38 -}, basePartial, injectPartial, webpackPartial);
39 -module.exports = joi.object().keys(supportedOptions).keys({
40 - // List this separately, so that the swSrc validation happens first.
41 - swDest: joi.string().default(swSrcBasename)
42 -});
...\ No newline at end of file ...\ No newline at end of file
1 -"use strict";
2 -
3 -/*
4 - Copyright 2018 Google LLC
5 -
6 - Use of this source code is governed by an MIT-style
7 - license that can be found in the LICENSE file or at
8 - https://opensource.org/licenses/MIT.
9 -*/
10 -module.exports = `/**
11 - * Welcome to your Workbox-powered service worker!
12 - *
13 - * You'll need to register this file in your web app.
14 - * See https://goo.gl/nhQhGp
15 - *
16 - * The rest of the code is auto-generated. Please don't update this file
17 - * directly; instead, make changes to your Workbox build configuration
18 - * and re-run your build process.
19 - * See https://goo.gl/2aRDsh
20 - */
21 -
22 -<% if (importScripts) { %>
23 -importScripts(
24 - <%= importScripts.map(JSON.stringify).join(',\\n ') %>
25 -);
26 -<% } %>
27 -
28 -<% if (navigationPreload) { %><%= use('workbox-navigation-preload', 'enable') %>();<% } %>
29 -
30 -<% if (cacheId) { %><%= use('workbox-core', 'setCacheNameDetails') %>({prefix: <%= JSON.stringify(cacheId) %>});<% } %>
31 -
32 -<% if (skipWaiting) { %>
33 -<%= use('workbox-core', 'skipWaiting') %>();
34 -<% } else { %>
35 -self.addEventListener('message', (event) => {
36 - if (event.data && event.data.type === 'SKIP_WAITING') {
37 - self.skipWaiting();
38 - }
39 -});
40 -<% } %>
41 -<% if (clientsClaim) { %><%= use('workbox-core', 'clientsClaim') %>();<% } %>
42 -
43 -<% if (Array.isArray(manifestEntries) && manifestEntries.length > 0) {%>
44 -/**
45 - * The precacheAndRoute() method efficiently caches and responds to
46 - * requests for URLs in the manifest.
47 - * See https://goo.gl/S9QRab
48 - */
49 -<%= use('workbox-precaching', 'precacheAndRoute') %>(<%= JSON.stringify(manifestEntries, null, 2) %>, <%= precacheOptionsString %>);
50 -<% if (cleanupOutdatedCaches) { %><%= use('workbox-precaching', 'cleanupOutdatedCaches') %>();<% } %>
51 -<% if (navigateFallback) { %><%= use('workbox-routing', 'registerRoute') %>(new <%= use('workbox-routing', 'NavigationRoute') %>(<%= use('workbox-precaching', 'createHandlerBoundToURL') %>(<%= JSON.stringify(navigateFallback) %>)<% if (navigateFallbackAllowlist || navigateFallbackDenylist) { %>, {
52 - <% if (navigateFallbackAllowlist) { %>allowlist: [<%= navigateFallbackAllowlist %>],<% } %>
53 - <% if (navigateFallbackDenylist) { %>denylist: [<%= navigateFallbackDenylist %>],<% } %>
54 -}<% } %>));<% } %>
55 -<% } %>
56 -
57 -<% if (runtimeCaching) { runtimeCaching.forEach(runtimeCachingString => {%><%= runtimeCachingString %><% });} %>
58 -
59 -<% if (offlineAnalyticsConfigString) { %><%= use('workbox-google-analytics', 'initialize') %>(<%= offlineAnalyticsConfigString %>);<% } %>
60 -
61 -<% if (disableDevLogs) { %>self.__WB_DISABLE_DEV_LOGS = true;<% } %>`;
...\ No newline at end of file ...\ No newline at end of file
1 -(The MIT License)
2 -
3 -Copyright (c) 2011-2017 JP Richardson
4 -
5 -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
6 -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
7 - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
8 - furnished to do so, subject to the following conditions:
9 -
10 -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
11 -
12 -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
13 -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
14 -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
15 - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
1 -Node.js: fs-extra
2 -=================
3 -
4 -`fs-extra` adds file system methods that aren't included in the native `fs` module and adds promise support to the `fs` methods. It also uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to prevent `EMFILE` errors. It should be a drop-in replacement for `fs`.
5 -
6 -[![npm Package](https://img.shields.io/npm/v/fs-extra.svg)](https://www.npmjs.org/package/fs-extra)
7 -[![License](https://img.shields.io/npm/l/express.svg)](https://github.com/jprichardson/node-fs-extra/blob/master/LICENSE)
8 -[![build status](https://img.shields.io/travis/jprichardson/node-fs-extra/master.svg)](http://travis-ci.org/jprichardson/node-fs-extra)
9 -[![windows Build status](https://img.shields.io/appveyor/ci/jprichardson/node-fs-extra/master.svg?label=windows%20build)](https://ci.appveyor.com/project/jprichardson/node-fs-extra/branch/master)
10 -[![downloads per month](http://img.shields.io/npm/dm/fs-extra.svg)](https://www.npmjs.org/package/fs-extra)
11 -[![Coverage Status](https://img.shields.io/coveralls/github/jprichardson/node-fs-extra/master.svg)](https://coveralls.io/github/jprichardson/node-fs-extra)
12 -[![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com)
13 -
14 -Why?
15 -----
16 -
17 -I got tired of including `mkdirp`, `rimraf`, and `ncp` in most of my projects.
18 -
19 -
20 -
21 -
22 -Installation
23 -------------
24 -
25 - npm install fs-extra
26 -
27 -
28 -
29 -Usage
30 ------
31 -
32 -`fs-extra` is a drop-in replacement for native `fs`. All methods in `fs` are attached to `fs-extra`. All `fs` methods return promises if the callback isn't passed.
33 -
34 -You don't ever need to include the original `fs` module again:
35 -
36 -```js
37 -const fs = require('fs') // this is no longer necessary
38 -```
39 -
40 -you can now do this:
41 -
42 -```js
43 -const fs = require('fs-extra')
44 -```
45 -
46 -or if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want
47 -to name your `fs` variable `fse` like so:
48 -
49 -```js
50 -const fse = require('fs-extra')
51 -```
52 -
53 -you can also keep both, but it's redundant:
54 -
55 -```js
56 -const fs = require('fs')
57 -const fse = require('fs-extra')
58 -```
59 -
60 -Sync vs Async vs Async/Await
61 --------------
62 -Most methods are async by default. All async methods will return a promise if the callback isn't passed.
63 -
64 -Sync methods, on the other hand, will throw if an error occurs.
65 -
66 -Likewise, with async/await, an error will be thrown if one occurs.
67 -
68 -Example:
69 -
70 -```js
71 -const fs = require('fs-extra')
72 -
73 -// Async with promises:
74 -fs.copy('/tmp/myfile', '/tmp/mynewfile')
75 - .then(() => console.log('success!'))
76 - .catch(err => console.error(err))
77 -
78 -// Async with callbacks:
79 -fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
80 - if (err) return console.error(err)
81 - console.log('success!')
82 -})
83 -
84 -// Sync:
85 -try {
86 - fs.copySync('/tmp/myfile', '/tmp/mynewfile')
87 - console.log('success!')
88 -} catch (err) {
89 - console.error(err)
90 -}
91 -
92 -// Async/Await:
93 -async function copyFiles () {
94 - try {
95 - await fs.copy('/tmp/myfile', '/tmp/mynewfile')
96 - console.log('success!')
97 - } catch (err) {
98 - console.error(err)
99 - }
100 -}
101 -
102 -copyFiles()
103 -```
104 -
105 -
106 -Methods
107 --------
108 -
109 -### Async
110 -
111 -- [copy](docs/copy.md)
112 -- [emptyDir](docs/emptyDir.md)
113 -- [ensureFile](docs/ensureFile.md)
114 -- [ensureDir](docs/ensureDir.md)
115 -- [ensureLink](docs/ensureLink.md)
116 -- [ensureSymlink](docs/ensureSymlink.md)
117 -- [mkdirp](docs/ensureDir.md)
118 -- [mkdirs](docs/ensureDir.md)
119 -- [move](docs/move.md)
120 -- [outputFile](docs/outputFile.md)
121 -- [outputJson](docs/outputJson.md)
122 -- [pathExists](docs/pathExists.md)
123 -- [readJson](docs/readJson.md)
124 -- [remove](docs/remove.md)
125 -- [writeJson](docs/writeJson.md)
126 -
127 -### Sync
128 -
129 -- [copySync](docs/copy-sync.md)
130 -- [emptyDirSync](docs/emptyDir-sync.md)
131 -- [ensureFileSync](docs/ensureFile-sync.md)
132 -- [ensureDirSync](docs/ensureDir-sync.md)
133 -- [ensureLinkSync](docs/ensureLink-sync.md)
134 -- [ensureSymlinkSync](docs/ensureSymlink-sync.md)
135 -- [mkdirpSync](docs/ensureDir-sync.md)
136 -- [mkdirsSync](docs/ensureDir-sync.md)
137 -- [moveSync](docs/move-sync.md)
138 -- [outputFileSync](docs/outputFile-sync.md)
139 -- [outputJsonSync](docs/outputJson-sync.md)
140 -- [pathExistsSync](docs/pathExists-sync.md)
141 -- [readJsonSync](docs/readJson-sync.md)
142 -- [removeSync](docs/remove-sync.md)
143 -- [writeJsonSync](docs/writeJson-sync.md)
144 -
145 -
146 -**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()` & `fs.write()`](docs/fs-read-write.md)
147 -
148 -### What happened to `walk()` and `walkSync()`?
149 -
150 -They were removed from `fs-extra` in v2.0.0. If you need the functionality, `walk` and `walkSync` are available as separate packages, [`klaw`](https://github.com/jprichardson/node-klaw) and [`klaw-sync`](https://github.com/manidlou/node-klaw-sync).
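
For reference, a minimal sketch of a recursive walk using `klaw` (the directory path is illustrative, and this assumes `klaw`'s streaming interface as described in its own README):

```js
const klaw = require('klaw')

// klaw returns a Readable stream; each 'data' event is one file or directory.
const paths = []
klaw('/some/dir')
  .on('data', item => paths.push(item.path))
  .on('error', err => console.error(err))
  .on('end', () => console.log(paths))
```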
151 -
152 -
153 -Third Party
154 ------------
155 -
156 -
157 -### TypeScript
158 -
159 -If you like TypeScript, you can use `fs-extra` with it: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra
160 -
161 -
162 -### File / Directory Watching
163 -
164 -If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar).
165 -
166 -### Obtain Filesystem (Devices, Partitions) Information
167 -
168 -[fs-filesystem](https://github.com/arthurintelligence/node-fs-filesystem) allows you to read the state of the filesystem of the host on which it is run. It returns information about both the devices and the partitions (volumes) of the system.
169 -
170 -### Misc.
171 -
172 -- [fs-extra-debug](https://github.com/jdxcode/fs-extra-debug) - Send your fs-extra calls to [debug](https://npmjs.org/package/debug).
173 -- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls.
174 -
175 -
176 -
177 -Hacking on fs-extra
178 --------------------
179 -
180 -Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project
181 -uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you,
182 -you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`.
183 -
184 -[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard)
185 -
186 -What's needed?
187 -- First, take a look at existing issues. Those are probably going to be where the priority lies.
188 -- More tests for edge cases. Specifically on different platforms. There can never be enough tests.
189 -- Improve test coverage. See coveralls output for more info.
190 -
191 -Note: If you make any big changes, **you should definitely file an issue for discussion first.**
192 -
193 -### Running the Test Suite
194 -
195 -fs-extra contains hundreds of tests.
196 -
197 -- `npm run lint`: runs the linter ([standard](http://standardjs.com/))
198 -- `npm run unit`: runs the unit tests
199 -- `npm test`: runs both the linter and the tests
200 -
201 -
202 -### Windows
203 -
204 -If you run the tests on Windows and receive a lot of symbolic link `EPERM` permission errors, it's
205 -because on Windows you need elevated privileges to create symbolic links. You can grant this permission to your Windows
206 -account by following the instructions here: http://superuser.com/questions/104845/permission-to-make-symbolic-links-in-windows-7
207 -However, I didn't have much luck doing this.
208 -
209 -Since I develop on Mac OS X, I use VMWare Fusion for Windows testing. I create a shared folder that I map to a drive on Windows.
210 -I open the `Node.js command prompt` and run as `Administrator`. I then map the network drive by running the following command:
211 -
212 - net use z: "\\vmware-host\Shared Folders"
213 -
214 -I can then navigate to my `fs-extra` directory and run the tests.
215 -
216 -
217 -Naming
218 -------
219 -
220 -I put a lot of thought into the naming of these functions, inspired by @coolaj86's request, so he deserves much of the credit for raising the issue. See the discussions here:
221 -
222 -* https://github.com/jprichardson/node-fs-extra/issues/2
223 -* https://github.com/flatiron/utile/issues/11
224 -* https://github.com/ryanmcgrath/wrench-js/issues/29
225 -* https://github.com/substack/node-mkdirp/issues/17
226 -
227 -First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with Node.js's own naming schemes.
228 -
229 -For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc.
230 -
231 -We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`?
232 -
233 -My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's supposed to be contained in does not exist, then in most cases you'll want to create that too.
234 -
235 -So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory regardless of whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`.
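
For example, a minimal sketch of those calls in practice (the paths are illustrative):

```js
const fse = require('fs-extra')

async function example () {
  // Create nested directories in one call, like `mkdir -p`.
  await fse.mkdirs('/tmp/a/b/c')

  // Copy a directory and all of its contents, like `cp -r`.
  await fse.copy('/tmp/a', '/tmp/a-copy')

  // Remove a directory and everything in it, like `rm -rf`.
  await fse.remove('/tmp/a-copy')
}

example().catch(console.error)
```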
236 -
237 -
238 -Credit
239 -------
240 -
241 -`fs-extra` wouldn't be possible without using the modules from the following authors:
242 -
243 -- [Isaac Schlueter](https://github.com/isaacs)
244 -- [Charlie McConnel](https://github.com/avianflu)
245 -- [James Halliday](https://github.com/substack)
246 -- [Andrew Kelley](https://github.com/andrewrk)
247 -
248 -
249 -
250 -
251 -License
252 --------
253 -
254 -Licensed under MIT
255 -
256 -Copyright (c) 2011-2017 [JP Richardson](https://github.com/jprichardson)
257 -
258 -[1]: http://nodejs.org/docs/latest/api/fs.html
259 -
260 -
261 -[jsonfile]: https://github.com/jprichardson/node-jsonfile
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const mkdirpSync = require('../mkdirs').mkdirsSync
6 -const utimesSync = require('../util/utimes.js').utimesMillisSync
7 -const stat = require('../util/stat')
8 -
9 -function copySync (src, dest, opts) {
10 - if (typeof opts === 'function') {
11 - opts = { filter: opts }
12 - }
13 -
14 - opts = opts || {}
15 - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
16 - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
17 -
18 - // Warn about using preserveTimestamps on 32-bit node
19 - if (opts.preserveTimestamps && process.arch === 'ia32') {
20 - console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n
21 - see https://github.com/jprichardson/node-fs-extra/issues/269`)
22 - }
23 -
24 - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy')
25 - stat.checkParentPathsSync(src, srcStat, dest, 'copy')
26 - return handleFilterAndCopy(destStat, src, dest, opts)
27 -}
28 -
29 -function handleFilterAndCopy (destStat, src, dest, opts) {
30 - if (opts.filter && !opts.filter(src, dest)) return
31 - const destParent = path.dirname(dest)
32 - if (!fs.existsSync(destParent)) mkdirpSync(destParent)
33 - return startCopy(destStat, src, dest, opts)
34 -}
35 -
36 -function startCopy (destStat, src, dest, opts) {
37 - if (opts.filter && !opts.filter(src, dest)) return
38 - return getStats(destStat, src, dest, opts)
39 -}
40 -
41 -function getStats (destStat, src, dest, opts) {
42 - const statSync = opts.dereference ? fs.statSync : fs.lstatSync
43 - const srcStat = statSync(src)
44 -
45 - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
46 - else if (srcStat.isFile() ||
47 - srcStat.isCharacterDevice() ||
48 - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)
49 - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
50 -}
51 -
52 -function onFile (srcStat, destStat, src, dest, opts) {
53 - if (!destStat) return copyFile(srcStat, src, dest, opts)
54 - return mayCopyFile(srcStat, src, dest, opts)
55 -}
56 -
57 -function mayCopyFile (srcStat, src, dest, opts) {
58 - if (opts.overwrite) {
59 - fs.unlinkSync(dest)
60 - return copyFile(srcStat, src, dest, opts)
61 - } else if (opts.errorOnExist) {
62 - throw new Error(`'${dest}' already exists`)
63 - }
64 -}
65 -
66 -function copyFile (srcStat, src, dest, opts) {
67 - if (typeof fs.copyFileSync === 'function') {
68 - fs.copyFileSync(src, dest)
69 - fs.chmodSync(dest, srcStat.mode)
70 - if (opts.preserveTimestamps) {
71 - return utimesSync(dest, srcStat.atime, srcStat.mtime)
72 - }
73 - return
74 - }
75 - return copyFileFallback(srcStat, src, dest, opts)
76 -}
77 -
78 -function copyFileFallback (srcStat, src, dest, opts) {
79 - const BUF_LENGTH = 64 * 1024
80 - const _buff = require('../util/buffer')(BUF_LENGTH)
81 -
82 - const fdr = fs.openSync(src, 'r')
83 - const fdw = fs.openSync(dest, 'w', srcStat.mode)
84 - let pos = 0
85 -
86 - while (pos < srcStat.size) {
87 - const bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)
88 - fs.writeSync(fdw, _buff, 0, bytesRead)
89 - pos += bytesRead
90 - }
91 -
92 - if (opts.preserveTimestamps) fs.futimesSync(fdw, srcStat.atime, srcStat.mtime)
93 -
94 - fs.closeSync(fdr)
95 - fs.closeSync(fdw)
96 -}
97 -
98 -function onDir (srcStat, destStat, src, dest, opts) {
99 - if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts)
100 - if (destStat && !destStat.isDirectory()) {
101 - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
102 - }
103 - return copyDir(src, dest, opts)
104 -}
105 -
106 -function mkDirAndCopy (srcStat, src, dest, opts) {
107 - fs.mkdirSync(dest)
108 - copyDir(src, dest, opts)
109 - return fs.chmodSync(dest, srcStat.mode)
110 -}
111 -
112 -function copyDir (src, dest, opts) {
113 - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts))
114 -}
115 -
116 -function copyDirItem (item, src, dest, opts) {
117 - const srcItem = path.join(src, item)
118 - const destItem = path.join(dest, item)
119 - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy')
120 - return startCopy(destStat, srcItem, destItem, opts)
121 -}
122 -
123 -function onLink (destStat, src, dest, opts) {
124 - let resolvedSrc = fs.readlinkSync(src)
125 - if (opts.dereference) {
126 - resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
127 - }
128 -
129 - if (!destStat) {
130 - return fs.symlinkSync(resolvedSrc, dest)
131 - } else {
132 - let resolvedDest
133 - try {
134 - resolvedDest = fs.readlinkSync(dest)
135 - } catch (err) {
136 - // dest exists and is a regular file or directory,
137 - // Windows may throw UNKNOWN error. If dest already exists,
138 - // fs throws error anyway, so no need to guard against it here.
139 - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)
140 - throw err
141 - }
142 - if (opts.dereference) {
143 - resolvedDest = path.resolve(process.cwd(), resolvedDest)
144 - }
145 - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
146 - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
147 - }
148 -
149 - // prevent copy if src is a subdir of dest since unlinking
150 - // dest in this case would result in removing src contents
151 - // and therefore a broken symlink would be created.
152 - if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
153 - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
154 - }
155 - return copyLink(resolvedSrc, dest)
156 - }
157 -}
158 -
159 -function copyLink (resolvedSrc, dest) {
160 - fs.unlinkSync(dest)
161 - return fs.symlinkSync(resolvedSrc, dest)
162 -}
163 -
164 -module.exports = copySync
1 -'use strict'
2 -
3 -module.exports = {
4 - copySync: require('./copy-sync')
5 -}
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const mkdirp = require('../mkdirs').mkdirs
6 -const pathExists = require('../path-exists').pathExists
7 -const utimes = require('../util/utimes').utimesMillis
8 -const stat = require('../util/stat')
9 -
10 -function copy (src, dest, opts, cb) {
11 - if (typeof opts === 'function' && !cb) {
12 - cb = opts
13 - opts = {}
14 - } else if (typeof opts === 'function') {
15 - opts = { filter: opts }
16 - }
17 -
18 - cb = cb || function () {}
19 - opts = opts || {}
20 -
21 - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
22 - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
23 -
24 - // Warn about using preserveTimestamps on 32-bit node
25 - if (opts.preserveTimestamps && process.arch === 'ia32') {
26 - console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n
27 - see https://github.com/jprichardson/node-fs-extra/issues/269`)
28 - }
29 -
30 - stat.checkPaths(src, dest, 'copy', (err, stats) => {
31 - if (err) return cb(err)
32 - const { srcStat, destStat } = stats
33 - stat.checkParentPaths(src, srcStat, dest, 'copy', err => {
34 - if (err) return cb(err)
35 - if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb)
36 - return checkParentDir(destStat, src, dest, opts, cb)
37 - })
38 - })
39 -}
40 -
41 -function checkParentDir (destStat, src, dest, opts, cb) {
42 - const destParent = path.dirname(dest)
43 - pathExists(destParent, (err, dirExists) => {
44 - if (err) return cb(err)
45 - if (dirExists) return startCopy(destStat, src, dest, opts, cb)
46 - mkdirp(destParent, err => {
47 - if (err) return cb(err)
48 - return startCopy(destStat, src, dest, opts, cb)
49 - })
50 - })
51 -}
52 -
53 -function handleFilter (onInclude, destStat, src, dest, opts, cb) {
54 - Promise.resolve(opts.filter(src, dest)).then(include => {
55 - if (include) return onInclude(destStat, src, dest, opts, cb)
56 - return cb()
57 - }, error => cb(error))
58 -}
59 -
60 -function startCopy (destStat, src, dest, opts, cb) {
61 - if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb)
62 - return getStats(destStat, src, dest, opts, cb)
63 -}
64 -
65 -function getStats (destStat, src, dest, opts, cb) {
66 - const stat = opts.dereference ? fs.stat : fs.lstat
67 - stat(src, (err, srcStat) => {
68 - if (err) return cb(err)
69 -
70 - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb)
71 - else if (srcStat.isFile() ||
72 - srcStat.isCharacterDevice() ||
73 - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb)
74 - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb)
75 - })
76 -}
77 -
78 -function onFile (srcStat, destStat, src, dest, opts, cb) {
79 - if (!destStat) return copyFile(srcStat, src, dest, opts, cb)
80 - return mayCopyFile(srcStat, src, dest, opts, cb)
81 -}
82 -
83 -function mayCopyFile (srcStat, src, dest, opts, cb) {
84 - if (opts.overwrite) {
85 - fs.unlink(dest, err => {
86 - if (err) return cb(err)
87 - return copyFile(srcStat, src, dest, opts, cb)
88 - })
89 - } else if (opts.errorOnExist) {
90 - return cb(new Error(`'${dest}' already exists`))
91 - } else return cb()
92 -}
93 -
94 -function copyFile (srcStat, src, dest, opts, cb) {
95 - if (typeof fs.copyFile === 'function') {
96 - return fs.copyFile(src, dest, err => {
97 - if (err) return cb(err)
98 - return setDestModeAndTimestamps(srcStat, dest, opts, cb)
99 - })
100 - }
101 - return copyFileFallback(srcStat, src, dest, opts, cb)
102 -}
103 -
104 -function copyFileFallback (srcStat, src, dest, opts, cb) {
105 - const rs = fs.createReadStream(src)
106 - rs.on('error', err => cb(err)).once('open', () => {
107 - const ws = fs.createWriteStream(dest, { mode: srcStat.mode })
108 - ws.on('error', err => cb(err))
109 - .on('open', () => rs.pipe(ws))
110 - .once('close', () => setDestModeAndTimestamps(srcStat, dest, opts, cb))
111 - })
112 -}
113 -
114 -function setDestModeAndTimestamps (srcStat, dest, opts, cb) {
115 - fs.chmod(dest, srcStat.mode, err => {
116 - if (err) return cb(err)
117 - if (opts.preserveTimestamps) {
118 - return utimes(dest, srcStat.atime, srcStat.mtime, cb)
119 - }
120 - return cb()
121 - })
122 -}
123 -
124 -function onDir (srcStat, destStat, src, dest, opts, cb) {
125 - if (!destStat) return mkDirAndCopy(srcStat, src, dest, opts, cb)
126 - if (destStat && !destStat.isDirectory()) {
127 - return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`))
128 - }
129 - return copyDir(src, dest, opts, cb)
130 -}
131 -
132 -function mkDirAndCopy (srcStat, src, dest, opts, cb) {
133 - fs.mkdir(dest, err => {
134 - if (err) return cb(err)
135 - copyDir(src, dest, opts, err => {
136 - if (err) return cb(err)
137 - return fs.chmod(dest, srcStat.mode, cb)
138 - })
139 - })
140 -}
141 -
142 -function copyDir (src, dest, opts, cb) {
143 - fs.readdir(src, (err, items) => {
144 - if (err) return cb(err)
145 - return copyDirItems(items, src, dest, opts, cb)
146 - })
147 -}
148 -
149 -function copyDirItems (items, src, dest, opts, cb) {
150 - const item = items.pop()
151 - if (!item) return cb()
152 - return copyDirItem(items, item, src, dest, opts, cb)
153 -}
154 -
155 -function copyDirItem (items, item, src, dest, opts, cb) {
156 - const srcItem = path.join(src, item)
157 - const destItem = path.join(dest, item)
158 - stat.checkPaths(srcItem, destItem, 'copy', (err, stats) => {
159 - if (err) return cb(err)
160 - const { destStat } = stats
161 - startCopy(destStat, srcItem, destItem, opts, err => {
162 - if (err) return cb(err)
163 - return copyDirItems(items, src, dest, opts, cb)
164 - })
165 - })
166 -}
167 -
168 -function onLink (destStat, src, dest, opts, cb) {
169 - fs.readlink(src, (err, resolvedSrc) => {
170 - if (err) return cb(err)
171 - if (opts.dereference) {
172 - resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
173 - }
174 -
175 - if (!destStat) {
176 - return fs.symlink(resolvedSrc, dest, cb)
177 - } else {
178 - fs.readlink(dest, (err, resolvedDest) => {
179 - if (err) {
180 - // dest exists and is a regular file or directory,
181 - // Windows may throw UNKNOWN error. If dest already exists,
182 - // fs throws error anyway, so no need to guard against it here.
183 - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb)
184 - return cb(err)
185 - }
186 - if (opts.dereference) {
187 - resolvedDest = path.resolve(process.cwd(), resolvedDest)
188 - }
189 - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
190 - return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`))
191 - }
192 -
193 - // do not copy if src is a subdir of dest since unlinking
194 - // dest in this case would result in removing src contents
195 - // and therefore a broken symlink would be created.
196 - if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
197 - return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))
198 - }
199 - return copyLink(resolvedSrc, dest, cb)
200 - })
201 - }
202 - })
203 -}
204 -
205 -function copyLink (resolvedSrc, dest, cb) {
206 - fs.unlink(dest, err => {
207 - if (err) return cb(err)
208 - return fs.symlink(resolvedSrc, dest, cb)
209 - })
210 -}
211 -
212 -module.exports = copy
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -module.exports = {
5 - copy: u(require('./copy'))
6 -}
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const fs = require('graceful-fs')
5 -const path = require('path')
6 -const mkdir = require('../mkdirs')
7 -const remove = require('../remove')
8 -
9 -const emptyDir = u(function emptyDir (dir, callback) {
10 - callback = callback || function () {}
11 - fs.readdir(dir, (err, items) => {
12 - if (err) return mkdir.mkdirs(dir, callback)
13 -
14 - items = items.map(item => path.join(dir, item))
15 -
16 - deleteItem()
17 -
18 - function deleteItem () {
19 - const item = items.pop()
20 - if (!item) return callback()
21 - remove.remove(item, err => {
22 - if (err) return callback(err)
23 - deleteItem()
24 - })
25 - }
26 - })
27 -})
28 -
29 -function emptyDirSync (dir) {
30 - let items
31 - try {
32 - items = fs.readdirSync(dir)
33 - } catch (err) {
34 - return mkdir.mkdirsSync(dir)
35 - }
36 -
37 - items.forEach(item => {
38 - item = path.join(dir, item)
39 - remove.removeSync(item)
40 - })
41 -}
42 -
43 -module.exports = {
44 - emptyDirSync,
45 - emptydirSync: emptyDirSync,
46 - emptyDir,
47 - emptydir: emptyDir
48 -}
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const path = require('path')
5 -const fs = require('graceful-fs')
6 -const mkdir = require('../mkdirs')
7 -const pathExists = require('../path-exists').pathExists
8 -
9 -function createFile (file, callback) {
10 - function makeFile () {
11 - fs.writeFile(file, '', err => {
12 - if (err) return callback(err)
13 - callback()
14 - })
15 - }
16 -
17 - fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err
18 - if (!err && stats.isFile()) return callback()
19 - const dir = path.dirname(file)
20 - pathExists(dir, (err, dirExists) => {
21 - if (err) return callback(err)
22 - if (dirExists) return makeFile()
23 - mkdir.mkdirs(dir, err => {
24 - if (err) return callback(err)
25 - makeFile()
26 - })
27 - })
28 - })
29 -}
30 -
31 -function createFileSync (file) {
32 - let stats
33 - try {
34 - stats = fs.statSync(file)
35 - } catch (e) {}
36 - if (stats && stats.isFile()) return
37 -
38 - const dir = path.dirname(file)
39 - if (!fs.existsSync(dir)) {
40 - mkdir.mkdirsSync(dir)
41 - }
42 -
43 - fs.writeFileSync(file, '')
44 -}
45 -
46 -module.exports = {
47 - createFile: u(createFile),
48 - createFileSync
49 -}
1 -'use strict'
2 -
3 -const file = require('./file')
4 -const link = require('./link')
5 -const symlink = require('./symlink')
6 -
7 -module.exports = {
8 - // file
9 - createFile: file.createFile,
10 - createFileSync: file.createFileSync,
11 - ensureFile: file.createFile,
12 - ensureFileSync: file.createFileSync,
13 - // link
14 - createLink: link.createLink,
15 - createLinkSync: link.createLinkSync,
16 - ensureLink: link.createLink,
17 - ensureLinkSync: link.createLinkSync,
18 - // symlink
19 - createSymlink: symlink.createSymlink,
20 - createSymlinkSync: symlink.createSymlinkSync,
21 - ensureSymlink: symlink.createSymlink,
22 - ensureSymlinkSync: symlink.createSymlinkSync
23 -}
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const path = require('path')
5 -const fs = require('graceful-fs')
6 -const mkdir = require('../mkdirs')
7 -const pathExists = require('../path-exists').pathExists
8 -
9 -function createLink (srcpath, dstpath, callback) {
10 - function makeLink (srcpath, dstpath) {
11 - fs.link(srcpath, dstpath, err => {
12 - if (err) return callback(err)
13 - callback(null)
14 - })
15 - }
16 -
17 - pathExists(dstpath, (err, destinationExists) => {
18 - if (err) return callback(err)
19 - if (destinationExists) return callback(null)
20 - fs.lstat(srcpath, (err) => {
21 - if (err) {
22 - err.message = err.message.replace('lstat', 'ensureLink')
23 - return callback(err)
24 - }
25 -
26 - const dir = path.dirname(dstpath)
27 - pathExists(dir, (err, dirExists) => {
28 - if (err) return callback(err)
29 - if (dirExists) return makeLink(srcpath, dstpath)
30 - mkdir.mkdirs(dir, err => {
31 - if (err) return callback(err)
32 - makeLink(srcpath, dstpath)
33 - })
34 - })
35 - })
36 - })
37 -}
38 -
39 -function createLinkSync (srcpath, dstpath) {
40 - const destinationExists = fs.existsSync(dstpath)
41 - if (destinationExists) return undefined
42 -
43 - try {
44 - fs.lstatSync(srcpath)
45 - } catch (err) {
46 - err.message = err.message.replace('lstat', 'ensureLink')
47 - throw err
48 - }
49 -
50 - const dir = path.dirname(dstpath)
51 - const dirExists = fs.existsSync(dir)
52 - if (dirExists) return fs.linkSync(srcpath, dstpath)
53 - mkdir.mkdirsSync(dir)
54 -
55 - return fs.linkSync(srcpath, dstpath)
56 -}
57 -
58 -module.exports = {
59 - createLink: u(createLink),
60 - createLinkSync
61 -}
1 -'use strict'
2 -
3 -const path = require('path')
4 -const fs = require('graceful-fs')
5 -const pathExists = require('../path-exists').pathExists
6 -
7 -/**
8 - * Function that returns two types of paths, one relative to symlink, and one
9 - * relative to the current working directory. Checks if path is absolute or
10 - * relative. If the path is relative, this function checks if the path is
11 - * relative to symlink or relative to current working directory. This is an
12 - * initiative to find a smarter `srcpath` to supply when building symlinks.
13 - * This allows you to determine which path to use out of one of three possible
14 - * types of source paths. The first is an absolute path. This is detected by
15 - * `path.isAbsolute()`. When an absolute path is provided, it is checked to
16 - * see if it exists. If it does, it's used; if not, an error is returned
17 - * (callback) or thrown (sync). The other two options for `srcpath` are a
18 - * relative path. By default Node's `fs.symlink` works by creating a symlink
19 - * using `dstpath` and expects the `srcpath` to be relative to the newly
20 - * created symlink. If you provide a `srcpath` that does not exist on the file
21 - * system it results in a broken symlink. To minimize this, the function
22 - * checks to see if the 'relative to symlink' source file exists, and if it
23 - * does it will use it. If it does not, it checks if there's a file that
24 - * exists that is relative to the current working directory, if does its used.
25 - * This preserves the expectations of the original fs.symlink spec and adds
26 - * the ability to pass in `relative to current working direcotry` paths.
27 - */
28 -
29 -function symlinkPaths (srcpath, dstpath, callback) {
30 - if (path.isAbsolute(srcpath)) {
31 - return fs.lstat(srcpath, (err) => {
32 - if (err) {
33 - err.message = err.message.replace('lstat', 'ensureSymlink')
34 - return callback(err)
35 - }
36 - return callback(null, {
37 - 'toCwd': srcpath,
38 - 'toDst': srcpath
39 - })
40 - })
41 - } else {
42 - const dstdir = path.dirname(dstpath)
43 - const relativeToDst = path.join(dstdir, srcpath)
44 - return pathExists(relativeToDst, (err, exists) => {
45 - if (err) return callback(err)
46 - if (exists) {
47 - return callback(null, {
48 - 'toCwd': relativeToDst,
49 - 'toDst': srcpath
50 - })
51 - } else {
52 - return fs.lstat(srcpath, (err) => {
53 - if (err) {
54 - err.message = err.message.replace('lstat', 'ensureSymlink')
55 - return callback(err)
56 - }
57 - return callback(null, {
58 - 'toCwd': srcpath,
59 - 'toDst': path.relative(dstdir, srcpath)
60 - })
61 - })
62 - }
63 - })
64 - }
65 -}
66 -
67 -function symlinkPathsSync (srcpath, dstpath) {
68 - let exists
69 - if (path.isAbsolute(srcpath)) {
70 - exists = fs.existsSync(srcpath)
71 - if (!exists) throw new Error('absolute srcpath does not exist')
72 - return {
73 - 'toCwd': srcpath,
74 - 'toDst': srcpath
75 - }
76 - } else {
77 - const dstdir = path.dirname(dstpath)
78 - const relativeToDst = path.join(dstdir, srcpath)
79 - exists = fs.existsSync(relativeToDst)
80 - if (exists) {
81 - return {
82 - 'toCwd': relativeToDst,
83 - 'toDst': srcpath
84 - }
85 - } else {
86 - exists = fs.existsSync(srcpath)
87 - if (!exists) throw new Error('relative srcpath does not exist')
88 - return {
89 - 'toCwd': srcpath,
90 - 'toDst': path.relative(dstdir, srcpath)
91 - }
92 - }
93 - }
94 -}
95 -
96 -module.exports = {
97 - symlinkPaths,
98 - symlinkPathsSync
99 -}
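The doc comment at the top of this module describes how a relative `srcpath` is resolved. A brief sketch of that behavior, using hypothetical paths that are not part of the original source:

```js
// Hypothetical layout: app/data/config.json exists on disk, and we want a
// symlink app/current pointing at data/config.json.
const { symlinkPathsSync } = require('./symlink-paths')

const paths = symlinkPathsSync('data/config.json', 'app/current')
// The relative srcpath is first resolved against the symlink's directory
// ('app'), so when app/data/config.json exists the result is:
//   paths.toCwd === 'app/data/config.json' // used to probe the link type
//   paths.toDst === 'data/config.json'     // passed to fs.symlink unchanged
```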
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -
5 -function symlinkType (srcpath, type, callback) {
6 - callback = (typeof type === 'function') ? type : callback
7 - type = (typeof type === 'function') ? false : type
8 - if (type) return callback(null, type)
9 - fs.lstat(srcpath, (err, stats) => {
10 - if (err) return callback(null, 'file')
11 - type = (stats && stats.isDirectory()) ? 'dir' : 'file'
12 - callback(null, type)
13 - })
14 -}
15 -
16 -function symlinkTypeSync (srcpath, type) {
17 - let stats
18 -
19 - if (type) return type
20 - try {
21 - stats = fs.lstatSync(srcpath)
22 - } catch (e) {
23 - return 'file'
24 - }
25 - return (stats && stats.isDirectory()) ? 'dir' : 'file'
26 -}
27 -
28 -module.exports = {
29 - symlinkType,
30 - symlinkTypeSync
31 -}
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const path = require('path')
5 -const fs = require('graceful-fs')
6 -const _mkdirs = require('../mkdirs')
7 -const mkdirs = _mkdirs.mkdirs
8 -const mkdirsSync = _mkdirs.mkdirsSync
9 -
10 -const _symlinkPaths = require('./symlink-paths')
11 -const symlinkPaths = _symlinkPaths.symlinkPaths
12 -const symlinkPathsSync = _symlinkPaths.symlinkPathsSync
13 -
14 -const _symlinkType = require('./symlink-type')
15 -const symlinkType = _symlinkType.symlinkType
16 -const symlinkTypeSync = _symlinkType.symlinkTypeSync
17 -
18 -const pathExists = require('../path-exists').pathExists
19 -
20 -function createSymlink (srcpath, dstpath, type, callback) {
21 - callback = (typeof type === 'function') ? type : callback
22 - type = (typeof type === 'function') ? false : type
23 -
24 - pathExists(dstpath, (err, destinationExists) => {
25 - if (err) return callback(err)
26 - if (destinationExists) return callback(null)
27 - symlinkPaths(srcpath, dstpath, (err, relative) => {
28 - if (err) return callback(err)
29 - srcpath = relative.toDst
30 - symlinkType(relative.toCwd, type, (err, type) => {
31 - if (err) return callback(err)
32 - const dir = path.dirname(dstpath)
33 - pathExists(dir, (err, dirExists) => {
34 - if (err) return callback(err)
35 - if (dirExists) return fs.symlink(srcpath, dstpath, type, callback)
36 - mkdirs(dir, err => {
37 - if (err) return callback(err)
38 - fs.symlink(srcpath, dstpath, type, callback)
39 - })
40 - })
41 - })
42 - })
43 - })
44 -}
45 -
46 -function createSymlinkSync (srcpath, dstpath, type) {
47 - const destinationExists = fs.existsSync(dstpath)
48 - if (destinationExists) return undefined
49 -
50 - const relative = symlinkPathsSync(srcpath, dstpath)
51 - srcpath = relative.toDst
52 - type = symlinkTypeSync(relative.toCwd, type)
53 - const dir = path.dirname(dstpath)
54 - const exists = fs.existsSync(dir)
55 - if (exists) return fs.symlinkSync(srcpath, dstpath, type)
56 - mkdirsSync(dir)
57 - return fs.symlinkSync(srcpath, dstpath, type)
58 -}
59 -
60 -module.exports = {
61 - createSymlink: u(createSymlink),
62 - createSymlinkSync
63 -}
1 -'use strict'
2 -// This is adapted from https://github.com/normalize/mz
3 -// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors
4 -const u = require('universalify').fromCallback
5 -const fs = require('graceful-fs')
6 -
7 -const api = [
8 - 'access',
9 - 'appendFile',
10 - 'chmod',
11 - 'chown',
12 - 'close',
13 - 'copyFile',
14 - 'fchmod',
15 - 'fchown',
16 - 'fdatasync',
17 - 'fstat',
18 - 'fsync',
19 - 'ftruncate',
20 - 'futimes',
21 - 'lchown',
22 - 'lchmod',
23 - 'link',
24 - 'lstat',
25 - 'mkdir',
26 - 'mkdtemp',
27 - 'open',
28 - 'readFile',
29 - 'readdir',
30 - 'readlink',
31 - 'realpath',
32 - 'rename',
33 - 'rmdir',
34 - 'stat',
35 - 'symlink',
36 - 'truncate',
37 - 'unlink',
38 - 'utimes',
39 - 'writeFile'
40 -].filter(key => {
41 - // Some commands are not available on some systems. Ex:
42 - // fs.copyFile was added in Node.js v8.5.0
43 - // fs.mkdtemp was added in Node.js v5.10.0
44 - // fs.lchown is not available on at least some Linux
45 - return typeof fs[key] === 'function'
46 -})
47 -
48 -// Export all keys:
49 -Object.keys(fs).forEach(key => {
50 - if (key === 'promises') {
51 - // fs.promises is a getter property that triggers ExperimentalWarning
52 - // Don't re-export it here, the getter is defined in "lib/index.js"
53 - return
54 - }
55 - exports[key] = fs[key]
56 -})
57 -
58 -// Universalify async methods:
59 -api.forEach(method => {
60 - exports[method] = u(fs[method])
61 -})
62 -
63 -// We differ from mz/fs in that we still ship the old, broken, fs.exists()
64 -// since we are a drop-in replacement for the native module
65 -exports.exists = function (filename, callback) {
66 - if (typeof callback === 'function') {
67 - return fs.exists(filename, callback)
68 - }
69 - return new Promise(resolve => {
70 - return fs.exists(filename, resolve)
71 - })
72 -}
73 -
74 -// fs.read() & fs.write need special treatment due to multiple callback args
75 -
76 -exports.read = function (fd, buffer, offset, length, position, callback) {
77 - if (typeof callback === 'function') {
78 - return fs.read(fd, buffer, offset, length, position, callback)
79 - }
80 - return new Promise((resolve, reject) => {
81 - fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {
82 - if (err) return reject(err)
83 - resolve({ bytesRead, buffer })
84 - })
85 - })
86 -}
87 -
88 -// Function signature can be
89 -// fs.write(fd, buffer[, offset[, length[, position]]], callback)
90 -// OR
91 -// fs.write(fd, string[, position[, encoding]], callback)
92 -// We need to handle both cases, so we use ...args
93 -exports.write = function (fd, buffer, ...args) {
94 - if (typeof args[args.length - 1] === 'function') {
95 - return fs.write(fd, buffer, ...args)
96 - }
97 -
98 - return new Promise((resolve, reject) => {
99 - fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {
100 - if (err) return reject(err)
101 - resolve({ bytesWritten, buffer })
102 - })
103 - })
104 -}
105 -
106 -// fs.realpath.native only available in Node v9.2+
107 -if (typeof fs.realpath.native === 'function') {
108 - exports.realpath.native = u(fs.realpath.native)
109 -}
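As the comments above note, `read()` and `write()` resolve with an object when called without a callback. A short sketch of the promise-style usage (the file name is hypothetical; the module is re-exported by the package's main entry point):

```js
const fs = require('fs-extra')

async function readSome () {
  // open/close are promisified via universalify; read resolves with
  // { bytesRead, buffer } as implemented above.
  const fd = await fs.open('example.txt', 'r')
  const { bytesRead, buffer } = await fs.read(fd, Buffer.alloc(16), 0, 16, 0)
  console.log(buffer.slice(0, bytesRead).toString())
  await fs.close(fd)
}

readSome().catch(console.error)
```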
1 -'use strict'
2 -
3 -module.exports = Object.assign(
4 - {},
5 - // Export promiseified graceful-fs:
6 - require('./fs'),
7 - // Export extra methods:
8 - require('./copy-sync'),
9 - require('./copy'),
10 - require('./empty'),
11 - require('./ensure'),
12 - require('./json'),
13 - require('./mkdirs'),
14 - require('./move-sync'),
15 - require('./move'),
16 - require('./output'),
17 - require('./path-exists'),
18 - require('./remove')
19 -)
20 -
21 -// Export fs.promises as a getter property so that we don't trigger
22 -// ExperimentalWarning before fs.promises is actually accessed.
23 -const fs = require('fs')
24 -if (Object.getOwnPropertyDescriptor(fs, 'promises')) {
25 - Object.defineProperty(module.exports, 'promises', {
26 - get () { return fs.promises }
27 - })
28 -}
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const jsonFile = require('./jsonfile')
5 -
6 -jsonFile.outputJson = u(require('./output-json'))
7 -jsonFile.outputJsonSync = require('./output-json-sync')
8 -// aliases
9 -jsonFile.outputJSON = jsonFile.outputJson
10 -jsonFile.outputJSONSync = jsonFile.outputJsonSync
11 -jsonFile.writeJSON = jsonFile.writeJson
12 -jsonFile.writeJSONSync = jsonFile.writeJsonSync
13 -jsonFile.readJSON = jsonFile.readJson
14 -jsonFile.readJSONSync = jsonFile.readJsonSync
15 -
16 -module.exports = jsonFile
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const jsonFile = require('jsonfile')
5 -
6 -module.exports = {
7 - // jsonfile exports
8 - readJson: u(jsonFile.readFile),
9 - readJsonSync: jsonFile.readFileSync,
10 - writeJson: u(jsonFile.writeFile),
11 - writeJsonSync: jsonFile.writeFileSync
12 -}
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const mkdir = require('../mkdirs')
6 -const jsonFile = require('./jsonfile')
7 -
8 -function outputJsonSync (file, data, options) {
9 - const dir = path.dirname(file)
10 -
11 - if (!fs.existsSync(dir)) {
12 - mkdir.mkdirsSync(dir)
13 - }
14 -
15 - jsonFile.writeJsonSync(file, data, options)
16 -}
17 -
18 -module.exports = outputJsonSync
1 -'use strict'
2 -
3 -const path = require('path')
4 -const mkdir = require('../mkdirs')
5 -const pathExists = require('../path-exists').pathExists
6 -const jsonFile = require('./jsonfile')
7 -
8 -function outputJson (file, data, options, callback) {
9 - if (typeof options === 'function') {
10 - callback = options
11 - options = {}
12 - }
13 -
14 - const dir = path.dirname(file)
15 -
16 - pathExists(dir, (err, itDoes) => {
17 - if (err) return callback(err)
18 - if (itDoes) return jsonFile.writeJson(file, data, options, callback)
19 -
20 - mkdir.mkdirs(dir, err => {
21 - if (err) return callback(err)
22 - jsonFile.writeJson(file, data, options, callback)
23 - })
24 - })
25 -}
26 -
27 -module.exports = outputJson
1 -'use strict'
2 -const u = require('universalify').fromCallback
3 -const mkdirs = u(require('./mkdirs'))
4 -const mkdirsSync = require('./mkdirs-sync')
5 -
6 -module.exports = {
7 - mkdirs,
8 - mkdirsSync,
9 - // alias
10 - mkdirp: mkdirs,
11 - mkdirpSync: mkdirsSync,
12 - ensureDir: mkdirs,
13 - ensureDirSync: mkdirsSync
14 -}
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const invalidWin32Path = require('./win32').invalidWin32Path
6 -
7 -const o777 = parseInt('0777', 8)
8 -
9 -function mkdirsSync (p, opts, made) {
10 - if (!opts || typeof opts !== 'object') {
11 - opts = { mode: opts }
12 - }
13 -
14 - let mode = opts.mode
15 - const xfs = opts.fs || fs
16 -
17 - if (process.platform === 'win32' && invalidWin32Path(p)) {
18 - const errInval = new Error(p + ' contains invalid WIN32 path characters.')
19 - errInval.code = 'EINVAL'
20 - throw errInval
21 - }
22 -
23 - if (mode === undefined) {
24 - mode = o777 & (~process.umask())
25 - }
26 - if (!made) made = null
27 -
28 - p = path.resolve(p)
29 -
30 - try {
31 - xfs.mkdirSync(p, mode)
32 - made = made || p
33 - } catch (err0) {
34 - if (err0.code === 'ENOENT') {
35 - if (path.dirname(p) === p) throw err0
36 - made = mkdirsSync(path.dirname(p), opts, made)
37 - mkdirsSync(p, opts, made)
38 - } else {
39 - // In the case of any other error, just see if there's a dir there
40 - // already. If so, then hooray! If not, then something is borked.
41 - let stat
42 - try {
43 - stat = xfs.statSync(p)
44 - } catch (err1) {
45 - throw err0
46 - }
47 - if (!stat.isDirectory()) throw err0
48 - }
49 - }
50 -
51 - return made
52 -}
53 -
54 -module.exports = mkdirsSync
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const invalidWin32Path = require('./win32').invalidWin32Path
6 -
7 -const o777 = parseInt('0777', 8)
8 -
9 -function mkdirs (p, opts, callback, made) {
10 - if (typeof opts === 'function') {
11 - callback = opts
12 - opts = {}
13 - } else if (!opts || typeof opts !== 'object') {
14 - opts = { mode: opts }
15 - }
16 -
17 - if (process.platform === 'win32' && invalidWin32Path(p)) {
18 - const errInval = new Error(p + ' contains invalid WIN32 path characters.')
19 - errInval.code = 'EINVAL'
20 - return callback(errInval)
21 - }
22 -
23 - let mode = opts.mode
24 - const xfs = opts.fs || fs
25 -
26 - if (mode === undefined) {
27 - mode = o777 & (~process.umask())
28 - }
29 - if (!made) made = null
30 -
31 - callback = callback || function () {}
32 - p = path.resolve(p)
33 -
34 - xfs.mkdir(p, mode, er => {
35 - if (!er) {
36 - made = made || p
37 - return callback(null, made)
38 - }
39 - switch (er.code) {
40 - case 'ENOENT':
41 - if (path.dirname(p) === p) return callback(er)
42 - mkdirs(path.dirname(p), opts, (er, made) => {
43 - if (er) callback(er, made)
44 - else mkdirs(p, opts, callback, made)
45 - })
46 - break
47 -
48 - // In the case of any other error, just see if there's a dir
49 - // there already. If so, then hooray! If not, then something
50 - // is borked.
51 - default:
52 - xfs.stat(p, (er2, stat) => {
53 - // if the stat fails, then that's super weird.
54 - // let the original error be the failure reason.
55 - if (er2 || !stat.isDirectory()) callback(er, made)
56 - else callback(null, made)
57 - })
58 - break
59 - }
60 - })
61 -}
62 -
63 -module.exports = mkdirs
1 -'use strict'
2 -
3 -const path = require('path')
4 -
5 -// get drive on windows
6 -function getRootPath (p) {
7 - p = path.normalize(path.resolve(p)).split(path.sep)
8 - if (p.length > 0) return p[0]
9 - return null
10 -}
11 -
12 -// http://stackoverflow.com/a/62888/10333 contains more accurate
13 -// TODO: expand to include the rest
14 -const INVALID_PATH_CHARS = /[<>:"|?*]/
15 -
16 -function invalidWin32Path (p) {
17 - const rp = getRootPath(p)
18 - p = p.replace(rp, '')
19 - return INVALID_PATH_CHARS.test(p)
20 -}
21 -
22 -module.exports = {
23 - getRootPath,
24 - invalidWin32Path
25 -}
1 -'use strict'
2 -
3 -module.exports = {
4 - moveSync: require('./move-sync')
5 -}
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const copySync = require('../copy-sync').copySync
6 -const removeSync = require('../remove').removeSync
7 -const mkdirpSync = require('../mkdirs').mkdirpSync
8 -const stat = require('../util/stat')
9 -
10 -function moveSync (src, dest, opts) {
11 - opts = opts || {}
12 - const overwrite = opts.overwrite || opts.clobber || false
13 -
14 - const { srcStat } = stat.checkPathsSync(src, dest, 'move')
15 - stat.checkParentPathsSync(src, srcStat, dest, 'move')
16 - mkdirpSync(path.dirname(dest))
17 - return doRename(src, dest, overwrite)
18 -}
19 -
20 -function doRename (src, dest, overwrite) {
21 - if (overwrite) {
22 - removeSync(dest)
23 - return rename(src, dest, overwrite)
24 - }
25 - if (fs.existsSync(dest)) throw new Error('dest already exists.')
26 - return rename(src, dest, overwrite)
27 -}
28 -
29 -function rename (src, dest, overwrite) {
30 - try {
31 - fs.renameSync(src, dest)
32 - } catch (err) {
33 - if (err.code !== 'EXDEV') throw err
34 - return moveAcrossDevice(src, dest, overwrite)
35 - }
36 -}
37 -
38 -function moveAcrossDevice (src, dest, overwrite) {
39 - const opts = {
40 - overwrite,
41 - errorOnExist: true
42 - }
43 - copySync(src, dest, opts)
44 - return removeSync(src)
45 -}
46 -
47 -module.exports = moveSync
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -module.exports = {
5 - move: u(require('./move'))
6 -}
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const copy = require('../copy').copy
6 -const remove = require('../remove').remove
7 -const mkdirp = require('../mkdirs').mkdirp
8 -const pathExists = require('../path-exists').pathExists
9 -const stat = require('../util/stat')
10 -
11 -function move (src, dest, opts, cb) {
12 - if (typeof opts === 'function') {
13 - cb = opts
14 - opts = {}
15 - }
16 -
17 - const overwrite = opts.overwrite || opts.clobber || false
18 -
19 - stat.checkPaths(src, dest, 'move', (err, stats) => {
20 - if (err) return cb(err)
21 - const { srcStat } = stats
22 - stat.checkParentPaths(src, srcStat, dest, 'move', err => {
23 - if (err) return cb(err)
24 - mkdirp(path.dirname(dest), err => {
25 - if (err) return cb(err)
26 - return doRename(src, dest, overwrite, cb)
27 - })
28 - })
29 - })
30 -}
31 -
32 -function doRename (src, dest, overwrite, cb) {
33 - if (overwrite) {
34 - return remove(dest, err => {
35 - if (err) return cb(err)
36 - return rename(src, dest, overwrite, cb)
37 - })
38 - }
39 - pathExists(dest, (err, destExists) => {
40 - if (err) return cb(err)
41 - if (destExists) return cb(new Error('dest already exists.'))
42 - return rename(src, dest, overwrite, cb)
43 - })
44 -}
45 -
46 -function rename (src, dest, overwrite, cb) {
47 - fs.rename(src, dest, err => {
48 - if (!err) return cb()
49 - if (err.code !== 'EXDEV') return cb(err)
50 - return moveAcrossDevice(src, dest, overwrite, cb)
51 - })
52 -}
53 -
54 -function moveAcrossDevice (src, dest, overwrite, cb) {
55 - const opts = {
56 - overwrite,
57 - errorOnExist: true
58 - }
59 - copy(src, dest, opts, err => {
60 - if (err) return cb(err)
61 - return remove(src, cb)
62 - })
63 -}
64 -
65 -module.exports = move
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const fs = require('graceful-fs')
5 -const path = require('path')
6 -const mkdir = require('../mkdirs')
7 -const pathExists = require('../path-exists').pathExists
8 -
9 -function outputFile (file, data, encoding, callback) {
10 - if (typeof encoding === 'function') {
11 - callback = encoding
12 - encoding = 'utf8'
13 - }
14 -
15 - const dir = path.dirname(file)
16 - pathExists(dir, (err, itDoes) => {
17 - if (err) return callback(err)
18 - if (itDoes) return fs.writeFile(file, data, encoding, callback)
19 -
20 - mkdir.mkdirs(dir, err => {
21 - if (err) return callback(err)
22 -
23 - fs.writeFile(file, data, encoding, callback)
24 - })
25 - })
26 -}
27 -
28 -function outputFileSync (file, ...args) {
29 - const dir = path.dirname(file)
30 - if (fs.existsSync(dir)) {
31 - return fs.writeFileSync(file, ...args)
32 - }
33 - mkdir.mkdirsSync(dir)
34 - fs.writeFileSync(file, ...args)
35 -}
36 -
37 -module.exports = {
38 - outputFile: u(outputFile),
39 - outputFileSync
40 -}
1 -'use strict'
2 -const u = require('universalify').fromPromise
3 -const fs = require('../fs')
4 -
5 -function pathExists (path) {
6 - return fs.access(path).then(() => true).catch(() => false)
7 -}
8 -
9 -module.exports = {
10 - pathExists: u(pathExists),
11 - pathExistsSync: fs.existsSync
12 -}
1 -'use strict'
2 -
3 -const u = require('universalify').fromCallback
4 -const rimraf = require('./rimraf')
5 -
6 -module.exports = {
7 - remove: u(rimraf),
8 - removeSync: rimraf.sync
9 -}
1 -'use strict'
2 -
3 -const fs = require('graceful-fs')
4 -const path = require('path')
5 -const assert = require('assert')
6 -
7 -const isWindows = (process.platform === 'win32')
8 -
9 -function defaults (options) {
10 - const methods = [
11 - 'unlink',
12 - 'chmod',
13 - 'stat',
14 - 'lstat',
15 - 'rmdir',
16 - 'readdir'
17 - ]
18 - methods.forEach(m => {
19 - options[m] = options[m] || fs[m]
20 - m = m + 'Sync'
21 - options[m] = options[m] || fs[m]
22 - })
23 -
24 - options.maxBusyTries = options.maxBusyTries || 3
25 -}
26 -
27 -function rimraf (p, options, cb) {
28 - let busyTries = 0
29 -
30 - if (typeof options === 'function') {
31 - cb = options
32 - options = {}
33 - }
34 -
35 - assert(p, 'rimraf: missing path')
36 - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
37 - assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required')
38 - assert(options, 'rimraf: invalid options argument provided')
39 - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')
40 -
41 - defaults(options)
42 -
43 - rimraf_(p, options, function CB (er) {
44 - if (er) {
45 - if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') &&
46 - busyTries < options.maxBusyTries) {
47 - busyTries++
48 - const time = busyTries * 100
49 - // try again, with the same exact callback as this one.
50 - return setTimeout(() => rimraf_(p, options, CB), time)
51 - }
52 -
53 - // already gone
54 - if (er.code === 'ENOENT') er = null
55 - }
56 -
57 - cb(er)
58 - })
59 -}
60 -
61 -// Two possible strategies.
62 -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
63 -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
64 -//
65 -// Both result in an extra syscall when you guess wrong. However, there
66 -// are likely far more normal files in the world than directories. This
67 -// is based on the assumption that the average number of files per
68 -// directory is >= 1.
69 -//
70 -// If anyone ever complains about this, then I guess the strategy could
71 -// be made configurable somehow. But until then, YAGNI.
72 -function rimraf_ (p, options, cb) {
73 - assert(p)
74 - assert(options)
75 - assert(typeof cb === 'function')
76 -
77 - // sunos lets the root user unlink directories, which is... weird.
78 - // so we have to lstat here and make sure it's not a dir.
79 - options.lstat(p, (er, st) => {
80 - if (er && er.code === 'ENOENT') {
81 - return cb(null)
82 - }
83 -
84 - // Windows can EPERM on stat. Life is suffering.
85 - if (er && er.code === 'EPERM' && isWindows) {
86 - return fixWinEPERM(p, options, er, cb)
87 - }
88 -
89 - if (st && st.isDirectory()) {
90 - return rmdir(p, options, er, cb)
91 - }
92 -
93 - options.unlink(p, er => {
94 - if (er) {
95 - if (er.code === 'ENOENT') {
96 - return cb(null)
97 - }
98 - if (er.code === 'EPERM') {
99 - return (isWindows)
100 - ? fixWinEPERM(p, options, er, cb)
101 - : rmdir(p, options, er, cb)
102 - }
103 - if (er.code === 'EISDIR') {
104 - return rmdir(p, options, er, cb)
105 - }
106 - }
107 - return cb(er)
108 - })
109 - })
110 -}
111 -
112 -function fixWinEPERM (p, options, er, cb) {
113 - assert(p)
114 - assert(options)
115 - assert(typeof cb === 'function')
116 - if (er) {
117 - assert(er instanceof Error)
118 - }
119 -
120 - options.chmod(p, 0o666, er2 => {
121 - if (er2) {
122 - cb(er2.code === 'ENOENT' ? null : er)
123 - } else {
124 - options.stat(p, (er3, stats) => {
125 - if (er3) {
126 - cb(er3.code === 'ENOENT' ? null : er)
127 - } else if (stats.isDirectory()) {
128 - rmdir(p, options, er, cb)
129 - } else {
130 - options.unlink(p, cb)
131 - }
132 - })
133 - }
134 - })
135 -}
136 -
137 -function fixWinEPERMSync (p, options, er) {
138 - let stats
139 -
140 - assert(p)
141 - assert(options)
142 - if (er) {
143 - assert(er instanceof Error)
144 - }
145 -
146 - try {
147 - options.chmodSync(p, 0o666)
148 - } catch (er2) {
149 - if (er2.code === 'ENOENT') {
150 - return
151 - } else {
152 - throw er
153 - }
154 - }
155 -
156 - try {
157 - stats = options.statSync(p)
158 - } catch (er3) {
159 - if (er3.code === 'ENOENT') {
160 - return
161 - } else {
162 - throw er
163 - }
164 - }
165 -
166 - if (stats.isDirectory()) {
167 - rmdirSync(p, options, er)
168 - } else {
169 - options.unlinkSync(p)
170 - }
171 -}
172 -
173 -function rmdir (p, options, originalEr, cb) {
174 - assert(p)
175 - assert(options)
176 - if (originalEr) {
177 - assert(originalEr instanceof Error)
178 - }
179 - assert(typeof cb === 'function')
180 -
181 - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
182 - // if we guessed wrong, and it's not a directory, then
183 - // raise the original error.
184 - options.rmdir(p, er => {
185 - if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) {
186 - rmkids(p, options, cb)
187 - } else if (er && er.code === 'ENOTDIR') {
188 - cb(originalEr)
189 - } else {
190 - cb(er)
191 - }
192 - })
193 -}
194 -
195 -function rmkids (p, options, cb) {
196 - assert(p)
197 - assert(options)
198 - assert(typeof cb === 'function')
199 -
200 - options.readdir(p, (er, files) => {
201 - if (er) return cb(er)
202 -
203 - let n = files.length
204 - let errState
205 -
206 - if (n === 0) return options.rmdir(p, cb)
207 -
208 - files.forEach(f => {
209 - rimraf(path.join(p, f), options, er => {
210 - if (errState) {
211 - return
212 - }
213 - if (er) return cb(errState = er)
214 - if (--n === 0) {
215 - options.rmdir(p, cb)
216 - }
217 - })
218 - })
219 - })
220 -}
221 -
222 -// this looks simpler, and is strictly *faster*, but will
223 -// tie up the JavaScript thread and fail on excessively
224 -// deep directory trees.
225 -function rimrafSync (p, options) {
226 - let st
227 -
228 - options = options || {}
229 - defaults(options)
230 -
231 - assert(p, 'rimraf: missing path')
232 - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string')
233 - assert(options, 'rimraf: missing options')
234 - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object')
235 -
236 - try {
237 - st = options.lstatSync(p)
238 - } catch (er) {
239 - if (er.code === 'ENOENT') {
240 - return
241 - }
242 -
243 - // Windows can EPERM on stat. Life is suffering.
244 - if (er.code === 'EPERM' && isWindows) {
245 - fixWinEPERMSync(p, options, er)
246 - }
247 - }
248 -
249 - try {
250 - // sunos lets the root user unlink directories, which is... weird.
251 - if (st && st.isDirectory()) {
252 - rmdirSync(p, options, null)
253 - } else {
254 - options.unlinkSync(p)
255 - }
256 - } catch (er) {
257 - if (er.code === 'ENOENT') {
258 - return
259 - } else if (er.code === 'EPERM') {
260 - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
261 - } else if (er.code !== 'EISDIR') {
262 - throw er
263 - }
264 - rmdirSync(p, options, er)
265 - }
266 -}
267 -
268 -function rmdirSync (p, options, originalEr) {
269 - assert(p)
270 - assert(options)
271 - if (originalEr) {
272 - assert(originalEr instanceof Error)
273 - }
274 -
275 - try {
276 - options.rmdirSync(p)
277 - } catch (er) {
278 - if (er.code === 'ENOTDIR') {
279 - throw originalEr
280 - } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') {
281 - rmkidsSync(p, options)
282 - } else if (er.code !== 'ENOENT') {
283 - throw er
284 - }
285 - }
286 -}
287 -
288 -function rmkidsSync (p, options) {
289 - assert(p)
290 - assert(options)
291 - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))
292 -
293 - if (isWindows) {
294 - // We only end up here once we got ENOTEMPTY at least once, and
295 - // at this point, we are guaranteed to have removed all the kids.
296 - // So, we know that it won't be ENOENT or ENOTDIR or anything else.
297 - // try really hard to delete stuff on windows, because it has a
298 - // PROFOUNDLY annoying habit of not closing handles promptly when
299 - // files are deleted, resulting in spurious ENOTEMPTY errors.
300 - const startTime = Date.now()
301 - do {
302 - try {
303 - const ret = options.rmdirSync(p, options)
304 - return ret
305 - } catch (er) { }
306 - } while (Date.now() - startTime < 500) // give up after 500ms
307 - } else {
308 - const ret = options.rmdirSync(p, options)
309 - return ret
310 - }
311 -}
312 -
313 -module.exports = rimraf
314 -rimraf.sync = rimrafSync
1 -'use strict'
2 -/* eslint-disable node/no-deprecated-api */
3 -module.exports = function (size) {
4 - if (typeof Buffer.allocUnsafe === 'function') {
5 - try {
6 - return Buffer.allocUnsafe(size)
7 - } catch (e) {
8 - return new Buffer(size)
9 - }
10 - }
11 - return new Buffer(size)
12 -}
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.
This diff is collapsed. Click to expand it.