Add node modules and compiled JavaScript from main (#57)

Co-authored-by: Oliver King <oking3@uncc.edu>
Authored by github-actions[bot] on 2022-06-21 12:18:30 -04:00, committed by GitHub
parent d893f27da9
commit 7f7e5ba5ea
6750 changed files with 1745644 additions and 10860 deletions

182
node_modules/istanbul-lib-source-maps/lib/get-mapping.js generated vendored Normal file

@@ -0,0 +1,182 @@
/*
Copyright 2015, Yahoo Inc.
Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
const pathutils = require('./pathutils');
const {
GREATEST_LOWER_BOUND,
LEAST_UPPER_BOUND
} = require('source-map').SourceMapConsumer;
/**
* AST ranges are inclusive for start positions and exclusive for end positions.
* Source maps are also logically ranges over text, though interacting with
* them is generally achieved by working with explicit positions.
*
* When finding the _end_ location of an AST item, the range behavior is
* important because what we're asking for is the _end_ of whatever range
* corresponds to the end location we seek.
*
* This boils down to the following steps, conceptually, though the source-map
* library doesn't expose primitives to do this nicely:
*
* 1. Find the range on the generated file that ends at, or exclusively
* contains the end position of the AST node.
* 2. Find the range on the original file that corresponds to
* that generated range.
* 3. Find the _end_ location of that original range.
*/
function originalEndPositionFor(sourceMap, generatedEnd) {
// Given the generated location, find the original location of the mapping
// that corresponds to a range on the generated file that overlaps the
// generated file end location. Note however that this position on its
// own is not useful because it is the position of the _start_ of the range
// on the original file, and we want the _end_ of the range.
const beforeEndMapping = originalPositionTryBoth(
sourceMap,
generatedEnd.line,
generatedEnd.column - 1
);
if (beforeEndMapping.source === null) {
return null;
}
// Convert that original position back to a generated one, with a bump
// to the right, and a rightward bias. Since 'generatedPositionFor' searches
// for mappings in the original-order sorted list, this will find the
// mapping that corresponds to the one immediately after the
// beforeEndMapping mapping.
const afterEndMapping = sourceMap.generatedPositionFor({
source: beforeEndMapping.source,
line: beforeEndMapping.line,
column: beforeEndMapping.column + 1,
bias: LEAST_UPPER_BOUND
});
if (
// If this is null, it means that we've hit the end of the file,
// so we can use Infinity as the end column.
afterEndMapping.line === null ||
// If these don't match, it means that the call to
// 'generatedPositionFor' didn't find any other original mappings on
// the line we gave, so consider the binding to extend to infinity.
sourceMap.originalPositionFor(afterEndMapping).line !==
beforeEndMapping.line
) {
return {
source: beforeEndMapping.source,
line: beforeEndMapping.line,
column: Infinity
};
}
// Convert the end mapping into the real original position.
return sourceMap.originalPositionFor(afterEndMapping);
}
/**
 * Attempts to determine the original source position, first looking for the
 * closest mapping to the left (GREATEST_LOWER_BOUND) and, if that search
 * finds nothing, falling back to the closest mapping to the right
 * (LEAST_UPPER_BOUND).
 */
function originalPositionTryBoth(sourceMap, line, column) {
const mapping = sourceMap.originalPositionFor({
line,
column,
bias: GREATEST_LOWER_BOUND
});
if (mapping.source === null) {
return sourceMap.originalPositionFor({
line,
column,
bias: LEAST_UPPER_BOUND
});
} else {
return mapping;
}
}
function isInvalidPosition(pos) {
return (
!pos ||
typeof pos.line !== 'number' ||
typeof pos.column !== 'number' ||
pos.line < 0 ||
pos.column < 0
);
}
/**
 * Determines the original position for a given generated location.
 * @param {SourceMapConsumer} sourceMap the source map
 * @param {Object} generatedLocation the location in the generated file
 * @param {String} origFile the path of the generated file, used to resolve
 * relative source paths
 * @returns {Object} the remapped location Object, or null when no valid
 * mapping exists
 */
function getMapping(sourceMap, generatedLocation, origFile) {
if (!generatedLocation) {
return null;
}
if (
isInvalidPosition(generatedLocation.start) ||
isInvalidPosition(generatedLocation.end)
) {
return null;
}
const start = originalPositionTryBoth(
sourceMap,
generatedLocation.start.line,
generatedLocation.start.column
);
let end = originalEndPositionFor(sourceMap, generatedLocation.end);
/* istanbul ignore if: edge case too hard to test for */
if (!(start && end)) {
return null;
}
if (!(start.source && end.source)) {
return null;
}
if (start.source !== end.source) {
return null;
}
/* istanbul ignore if: edge case too hard to test for */
if (start.line === null || start.column === null) {
return null;
}
/* istanbul ignore if: edge case too hard to test for */
if (end.line === null || end.column === null) {
return null;
}
if (start.line === end.line && start.column === end.column) {
end = sourceMap.originalPositionFor({
line: generatedLocation.end.line,
column: generatedLocation.end.column,
bias: LEAST_UPPER_BOUND
});
end.column -= 1;
}
return {
source: pathutils.relativeTo(start.source, origFile),
loc: {
start: {
line: start.line,
column: start.column
},
end: {
line: end.line,
column: end.column
}
}
};
}
module.exports = getMapping;
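
To make the end-position handling above concrete, here is a minimal sketch of driving getMapping on its own. It assumes the synchronous source-map 0.6 constructor that this module relies on; the bundle path, source path, and the single mapping are invented for illustration.

// Example (not part of the vendored file): exercising getMapping directly.
const { SourceMapConsumer, SourceMapGenerator } = require('source-map');
const getMapping = require('./get-mapping');

// Build a tiny source map purely for illustration: generated line 1 of
// bundle.js maps back to line 3 of src/index.js.
const generator = new SourceMapGenerator({ file: 'bundle.js' });
generator.addMapping({
    generated: { line: 1, column: 0 },
    original: { line: 3, column: 0 },
    source: 'src/index.js'
});
const consumer = new SourceMapConsumer(JSON.parse(generator.toString()));

// A statement range observed in the generated file.
const generatedLocation = {
    start: { line: 1, column: 0 },
    end: { line: 1, column: 20 }
};

// Yields { source, loc } on success, or null when the range cannot be
// mapped back onto a single original source.
console.log(getMapping(consumer, generatedLocation, '/build/bundle.js'));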

226
node_modules/istanbul-lib-source-maps/lib/map-store.js generated vendored Normal file

@@ -0,0 +1,226 @@
/*
Copyright 2015, Yahoo Inc.
Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
const path = require('path');
const fs = require('fs');
const debug = require('debug')('istanbuljs');
const { SourceMapConsumer } = require('source-map');
const pathutils = require('./pathutils');
const { SourceMapTransformer } = require('./transformer');
/**
* Tracks source maps for registered files
*/
class MapStore {
/**
* @param {Object} [opts={}] options
* @param {Boolean} [opts.verbose=false] verbose mode
* @param {String} [opts.baseDir=null] alternate base directory
* used to resolve sourcemap files
* @param {Class} [opts.SourceStore=Map] class to use for the
* SourceStore. Must support `get`, `set` and `clear` methods.
* @param {Array} [opts.sourceStoreOpts=[]] arguments
* to use in the SourceStore constructor.
* @constructor
*/
constructor(opts) {
opts = {
baseDir: null,
verbose: false,
SourceStore: Map,
sourceStoreOpts: [],
...opts
};
this.baseDir = opts.baseDir;
this.verbose = opts.verbose;
this.sourceStore = new opts.SourceStore(...opts.sourceStoreOpts);
this.data = Object.create(null);
this.sourceFinder = this.sourceFinder.bind(this);
}
/**
* Registers a source map URL with this store. It performs some basic sanity
* checks and silently fails on malformed input.
* @param transformedFilePath - the file path for which the source map is valid.
* This must *exactly* match the path stashed for the coverage object to be
* useful.
* @param sourceMapUrl - the source map URL, **not** a comment
*/
registerURL(transformedFilePath, sourceMapUrl) {
const d = 'data:';
if (
sourceMapUrl.length > d.length &&
sourceMapUrl.substring(0, d.length) === d
) {
const b64 = 'base64,';
const pos = sourceMapUrl.indexOf(b64);
if (pos > 0) {
this.data[transformedFilePath] = {
type: 'encoded',
data: sourceMapUrl.substring(pos + b64.length)
};
} else {
debug(`Unable to interpret source map URL: ${sourceMapUrl}`);
}
return;
}
const dir = path.dirname(path.resolve(transformedFilePath));
const file = path.resolve(dir, sourceMapUrl);
this.data[transformedFilePath] = { type: 'file', data: file };
}
/**
* Registers a source map object with this store. Makes some basic sanity checks
* and silently fails on malformed input.
* @param transformedFilePath - the file path for which the source map is valid
* @param sourceMap - the source map object
*/
registerMap(transformedFilePath, sourceMap) {
if (sourceMap && sourceMap.version) {
this.data[transformedFilePath] = {
type: 'object',
data: sourceMap
};
} else {
debug(
'Invalid source map object: ' +
JSON.stringify(sourceMap, null, 2)
);
}
}
/**
* Retrieve a source map object from this store.
* @param filePath - the file path for which the source map is valid
* @returns {Object} a parsed source map object
*/
getSourceMapSync(filePath) {
try {
if (!this.data[filePath]) {
return;
}
const d = this.data[filePath];
if (d.type === 'file') {
return JSON.parse(fs.readFileSync(d.data, 'utf8'));
}
if (d.type === 'encoded') {
return JSON.parse(Buffer.from(d.data, 'base64').toString());
}
/* The caller might delete properties */
return {
...d.data
};
} catch (error) {
debug('Error returning source map for ' + filePath);
debug(error.stack);
return;
}
}
/**
* Adds an inputSourceMap property to each file's coverage data when a
* source map has been registered for that file. Mutates coverageData in place.
* @param coverageData - the __coverage__ object
*/
addInputSourceMapsSync(coverageData) {
Object.entries(coverageData).forEach(([filePath, data]) => {
if (data.inputSourceMap) {
return;
}
const sourceMap = this.getSourceMapSync(filePath);
if (sourceMap) {
data.inputSourceMap = sourceMap;
/* This huge property is not needed. */
delete data.inputSourceMap.sourcesContent;
}
});
}
sourceFinder(filePath) {
const content = this.sourceStore.get(filePath);
if (content !== undefined) {
return content;
}
if (path.isAbsolute(filePath)) {
return fs.readFileSync(filePath, 'utf8');
}
return fs.readFileSync(
pathutils.asAbsolute(filePath, this.baseDir),
'utf8'
);
}
/**
* Transforms the coverage map provided into one that refers to original
* sources when valid mappings have been registered with this store.
* @param {CoverageMap} coverageMap - the coverage map to transform
* @returns {Promise<CoverageMap>} the transformed coverage map
*/
async transformCoverage(coverageMap) {
const hasInputSourceMaps = coverageMap
.files()
.some(
file => coverageMap.fileCoverageFor(file).data.inputSourceMap
);
if (!hasInputSourceMaps && Object.keys(this.data).length === 0) {
return coverageMap;
}
const transformer = new SourceMapTransformer(
async (filePath, coverage) => {
try {
const obj =
coverage.data.inputSourceMap ||
this.getSourceMapSync(filePath);
if (!obj) {
return null;
}
const smc = new SourceMapConsumer(obj);
smc.sources.forEach(s => {
const content = smc.sourceContentFor(s);
if (content) {
const sourceFilePath = pathutils.relativeTo(
s,
filePath
);
this.sourceStore.set(sourceFilePath, content);
}
});
return smc;
} catch (error) {
debug('Error returning source map for ' + filePath);
debug(error.stack);
return null;
}
}
);
return await transformer.transform(coverageMap);
}
/**
* Disposes temporary resources allocated by this map store
*/
dispose() {
this.sourceStore.clear();
}
}
module.exports = { MapStore };
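
A rough sketch of the intended usage, under the assumption that `coverageData` is the raw `__coverage__` object produced by instrumented code; the `/build` paths and the `bundle.js.map` name are placeholders.

// Example (not part of the vendored file): remapping coverage with a MapStore.
const libCoverage = require('istanbul-lib-coverage');
const { MapStore } = require('./map-store');

async function remap(coverageData) {
    const coverageMap = libCoverage.createCoverageMap(coverageData);
    const store = new MapStore({ baseDir: '/build' }); // hypothetical base dir
    // Associate the transpiled bundle with its source map file on disk...
    store.registerURL('/build/bundle.js', 'bundle.js.map');
    // ...or hand over an already-parsed map object instead:
    // store.registerMap('/build/other.js', parsedSourceMap);
    const remapped = await store.transformCoverage(coverageMap);
    store.dispose();
    return remapped; // coverage keyed by the original source paths
}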

113
node_modules/istanbul-lib-source-maps/lib/mapped.js generated vendored Normal file

@@ -0,0 +1,113 @@
/*
Copyright 2015, Yahoo Inc.
Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
const { FileCoverage } = require('istanbul-lib-coverage').classes;
function locString(loc) {
return [
loc.start.line,
loc.start.column,
loc.end.line,
loc.end.column
].join(':');
}
class MappedCoverage extends FileCoverage {
constructor(pathOrObj) {
super(pathOrObj);
this.meta = {
last: {
s: 0,
f: 0,
b: 0
},
seen: {}
};
}
addStatement(loc, hits) {
const key = 's:' + locString(loc);
const { meta } = this;
let index = meta.seen[key];
if (index === undefined) {
index = meta.last.s;
meta.last.s += 1;
meta.seen[key] = index;
this.statementMap[index] = this.cloneLocation(loc);
}
this.s[index] = this.s[index] || 0;
this.s[index] += hits;
return index;
}
addFunction(name, decl, loc, hits) {
const key = 'f:' + locString(decl);
const { meta } = this;
let index = meta.seen[key];
if (index === undefined) {
index = meta.last.f;
meta.last.f += 1;
meta.seen[key] = index;
name = name || `(unknown_${index})`;
this.fnMap[index] = {
name,
decl: this.cloneLocation(decl),
loc: this.cloneLocation(loc)
};
}
this.f[index] = this.f[index] || 0;
this.f[index] += hits;
return index;
}
addBranch(type, loc, branchLocations, hits) {
const key = ['b', ...branchLocations.map(l => locString(l))].join(':');
const { meta } = this;
let index = meta.seen[key];
if (index === undefined) {
index = meta.last.b;
meta.last.b += 1;
meta.seen[key] = index;
this.branchMap[index] = {
loc,
type,
locations: branchLocations.map(l => this.cloneLocation(l))
};
}
if (!this.b[index]) {
this.b[index] = branchLocations.map(() => 0);
}
hits.forEach((hit, i) => {
this.b[index][i] += hit;
});
return index;
}
/* Returns a clone of the location object with only the attributes of interest */
cloneLocation(loc) {
return {
start: {
line: loc.start.line,
column: loc.start.column
},
end: {
line: loc.end.line,
column: loc.end.column
}
};
}
}
module.exports = {
MappedCoverage
};
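
A small illustration of the de-duplication behaviour above; the path and location are invented.

// Example (not part of the vendored file): repeated locations share an index.
const { MappedCoverage } = require('./mapped');

const mc = new MappedCoverage('/src/index.js'); // hypothetical original file
const loc = {
    start: { line: 3, column: 0 },
    end: { line: 3, column: 20 }
};

// The second call reuses the index allocated by the first and sums the hits.
const idx = mc.addStatement(loc, 2);
mc.addStatement(loc, 3);
console.log(idx, mc.s[idx]); // 0 5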

21
node_modules/istanbul-lib-source-maps/lib/pathutils.js generated vendored Normal file

@@ -0,0 +1,21 @@
/*
Copyright 2015, Yahoo Inc.
Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
const path = require('path');
module.exports = {
isAbsolute: path.isAbsolute,
asAbsolute(file, baseDir) {
return path.isAbsolute(file)
? file
: path.resolve(baseDir || process.cwd(), file);
},
relativeTo(file, origFile) {
return path.isAbsolute(file)
? file
: path.resolve(path.dirname(origFile), file);
}
};
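
For reference, the expected behaviour with POSIX-style paths; the paths themselves are invented.

// Example (not part of the vendored file).
const pathutils = require('./pathutils');

// Resolve a source map "sources" entry against the generated file that
// referenced it.
pathutils.relativeTo('../src/index.js', '/build/out/bundle.js');
// => '/build/src/index.js'

// Resolve a relative path against an explicit base directory.
pathutils.asAbsolute('coverage/map.json', '/project');
// => '/project/coverage/map.json'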

21
node_modules/istanbul-lib-source-maps/lib/transform-utils.js generated vendored Normal file

@@ -0,0 +1,21 @@
/*
Copyright 2015, Yahoo Inc.
Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
function getUniqueKey(pathname) {
return pathname.replace(/[\\/]/g, '_');
}
function getOutput(cache) {
return Object.values(cache).reduce(
(output, { file, mappedCoverage }) => ({
...output,
[file]: mappedCoverage
}),
{}
);
}
module.exports = { getUniqueKey, getOutput };
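
A quick sketch of what these helpers produce; the values are illustrative.

// Example (not part of the vendored file).
const { getUniqueKey, getOutput } = require('./transform-utils');

// Both path separators are flattened so one cache key works across platforms.
getUniqueKey('src/lib/util.js');   // 'src_lib_util.js'
getUniqueKey('src\\lib\\util.js'); // 'src_lib_util.js'

// getOutput collapses the transformer's cache into a plain
// { filePath: mappedCoverage } object keyed by the real file path.
const cache = {
    'src_index.js': { file: 'src/index.js', mappedCoverage: { /* MappedCoverage */ } }
};
console.log(Object.keys(getOutput(cache))); // [ 'src/index.js' ]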

133
node_modules/istanbul-lib-source-maps/lib/transformer.js generated vendored Normal file

@@ -0,0 +1,133 @@
/*
Copyright 2015, Yahoo Inc.
Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
'use strict';
const debug = require('debug')('istanbuljs');
const libCoverage = require('istanbul-lib-coverage');
const { MappedCoverage } = require('./mapped');
const getMapping = require('./get-mapping');
const { getUniqueKey, getOutput } = require('./transform-utils');
class SourceMapTransformer {
constructor(finder, opts = {}) {
this.finder = finder;
this.baseDir = opts.baseDir || process.cwd();
}
processFile(fc, sourceMap, coverageMapper) {
let changes = 0;
Object.entries(fc.statementMap).forEach(([s, loc]) => {
const hits = fc.s[s];
const mapping = getMapping(sourceMap, loc, fc.path);
if (mapping) {
changes += 1;
const mappedCoverage = coverageMapper(mapping.source);
mappedCoverage.addStatement(mapping.loc, hits);
}
});
Object.entries(fc.fnMap).forEach(([f, fnMeta]) => {
const hits = fc.f[f];
const mapping = getMapping(sourceMap, fnMeta.decl, fc.path);
const spanMapping = getMapping(sourceMap, fnMeta.loc, fc.path);
if (
mapping &&
spanMapping &&
mapping.source === spanMapping.source
) {
changes += 1;
const mappedCoverage = coverageMapper(mapping.source);
mappedCoverage.addFunction(
fnMeta.name,
mapping.loc,
spanMapping.loc,
hits
);
}
});
Object.entries(fc.branchMap).forEach(([b, branchMeta]) => {
const hits = fc.b[b];
const locs = [];
const mappedHits = [];
let source;
let skip;
branchMeta.locations.forEach((loc, i) => {
const mapping = getMapping(sourceMap, loc, fc.path);
if (mapping) {
if (!source) {
source = mapping.source;
}
if (mapping.source !== source) {
skip = true;
}
locs.push(mapping.loc);
mappedHits.push(hits[i]);
}
});
if (!skip && locs.length > 0) {
changes += 1;
const mappedCoverage = coverageMapper(source);
mappedCoverage.addBranch(
branchMeta.type,
locs[0] /* XXX: approximate the branch location with its first mapped location */,
locs,
mappedHits
);
}
});
return changes > 0;
}
async transform(coverageMap) {
const uniqueFiles = {};
const getMappedCoverage = file => {
const key = getUniqueKey(file);
if (!uniqueFiles[key]) {
uniqueFiles[key] = {
file,
mappedCoverage: new MappedCoverage(file)
};
}
return uniqueFiles[key].mappedCoverage;
};
for (const file of coverageMap.files()) {
const fc = coverageMap.fileCoverageFor(file);
const sourceMap = await this.finder(file, fc);
if (sourceMap) {
const changed = this.processFile(
fc,
sourceMap,
getMappedCoverage
);
if (!changed) {
debug(`File [${file}] ignored, nothing could be mapped`);
}
} else {
uniqueFiles[getUniqueKey(file)] = {
file,
mappedCoverage: new MappedCoverage(fc)
};
}
}
return libCoverage.createCoverageMap(getOutput(uniqueFiles));
}
}
module.exports = {
SourceMapTransformer
};
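
A sketch of driving the transformer directly with a custom finder; the `.map` file naming convention below is an assumption made for illustration, not something this module requires.

// Example (not part of the vendored file).
const fs = require('fs');
const { SourceMapConsumer } = require('source-map');
const { SourceMapTransformer } = require('./transformer');

// The finder receives the generated file path (and its FileCoverage) and may
// return null to leave that file's coverage untouched.
const transformer = new SourceMapTransformer(async filePath => {
    const mapPath = `${filePath}.map`; // assumed naming convention
    if (!fs.existsSync(mapPath)) {
        return null;
    }
    return new SourceMapConsumer(JSON.parse(fs.readFileSync(mapPath, 'utf8')));
});

// `coverageMap` would be an istanbul-lib-coverage CoverageMap built elsewhere:
// const remapped = await transformer.transform(coverageMap);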