"use strict"; var __read = (this && this.__read) || function (o, n) { var m = typeof Symbol === "function" && o[Symbol.iterator]; if (!m) return o; var i = m.call(o), r, ar = [], e; try { while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); } catch (error) { e = { error: error }; } finally { try { if (r && !r.done && (m = i["return"])) m.call(i); } finally { if (e) throw e.error; } } return ar; }; var __values = (this && this.__values) || function(o) { var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; if (m) return m.call(o); if (o && typeof o.length === "number") return { next: function () { if (o && i >= o.length) o = void 0; return { value: o && o[i++], done: !o }; } }; throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); }; Object.defineProperty(exports, "__esModule", { value: true }); var fs_1 = require("fs"); var mkdirp = require("mkdirp"); var path_1 = require("path"); var sha1_1 = require("../util/sha1"); var compiler_utils_1 = require("./compiler-utils"); var language_service_1 = require("./language-service"); var program_1 = require("./program"); var transpile_module_1 = require("./transpile-module"); function updateOutput(outputText, normalizedFileName, sourceMap, getExtension) { var base = path_1.basename(normalizedFileName); var base64Map = Buffer.from(updateSourceMap(sourceMap, normalizedFileName), 'utf8').toString('base64'); var sourceMapContent = "data:application/json;charset=utf-8;base64," + base64Map; var sourceMapLength = (base + ".map").length + (getExtension(normalizedFileName).length - path_1.extname(normalizedFileName).length); return outputText.slice(0, -sourceMapLength) + sourceMapContent; } var updateSourceMap = function (sourceMapText, normalizedFileName) { var sourceMap = JSON.parse(sourceMapText); sourceMap.file = normalizedFileName; sourceMap.sources = [normalizedFileName]; delete sourceMap.sourceRoot; return JSON.stringify(sourceMap); }; var getCacheName = function (sourceCode, normalizedFileName) { return sha1_1.sha1(normalizedFileName, '\x00', sourceCode); }; var isValidCacheContent = function (contents) { return /(?:9|0=|Q==)$/.test(contents.slice(-3)); }; var readThrough = function (cachedir, memoryCache, compileFn, getExtension, logger) { if (!cachedir) { return function (code, fileName, lineOffset) { var normalizedFileName = path_1.normalize(fileName); logger.debug({ normalizedFileName: normalizedFileName }, 'readThrough(): no cache'); var _a = __read(compileFn(code, normalizedFileName, lineOffset), 2), value = _a[0], sourceMap = _a[1]; var output = updateOutput(value, fileName, sourceMap, getExtension); memoryCache.outputs[normalizedFileName] = output; return output; }; } mkdirp.sync(cachedir); try { var resolvedModulesCache = fs_1.readFileSync(compiler_utils_1.getResolvedModulesCache(cachedir), 'utf-8'); memoryCache.resolvedModules = JSON.parse(resolvedModulesCache); } catch (e) { } return function (code, fileName, lineOffset) { var normalizedFileName = path_1.normalize(fileName); var cachePath = path_1.join(cachedir, getCacheName(code, normalizedFileName)); var extension = getExtension(normalizedFileName); var outputPath = "" + cachePath + extension; try { var output_1 = fs_1.readFileSync(outputPath, 'utf8'); if (isValidCacheContent(output_1)) { logger.debug({ normalizedFileName: normalizedFileName }, 'readThrough(): cache hit'); memoryCache.outputs[normalizedFileName] = output_1; return output_1; } } catch (err) { } logger.debug({ fileName: fileName }, 
'readThrough(): cache miss'); var _a = __read(compileFn(code, normalizedFileName, lineOffset), 2), value = _a[0], sourceMap = _a[1]; var output = updateOutput(value, normalizedFileName, sourceMap, getExtension); logger.debug({ normalizedFileName: normalizedFileName, outputPath: outputPath }, 'readThrough(): writing caches'); memoryCache.outputs[normalizedFileName] = output; fs_1.writeFileSync(outputPath, output); return output; }; }; exports.createCompiler = function (configs) { var e_1, _a; var logger = configs.logger.child({ namespace: 'ts-compiler' }); var _b = configs.typescript, compilerOptions = _b.options, fileNames = _b.fileNames, tsJest = configs.tsJest; var cachedir = configs.tsCacheDir; var ts = configs.compilerModule; var extensions = ['.ts', '.tsx']; var memoryCache = { contents: Object.create(null), versions: Object.create(null), outputs: Object.create(null), resolvedModules: Object.create(null), }; if (compilerOptions.allowJs) { extensions.push('.js'); extensions.push('.jsx'); } try { for (var fileNames_1 = __values(fileNames), fileNames_1_1 = fileNames_1.next(); !fileNames_1_1.done; fileNames_1_1 = fileNames_1.next()) { var path = fileNames_1_1.value; memoryCache.versions[path_1.normalize(path)] = 1; } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (fileNames_1_1 && !fileNames_1_1.done && (_a = fileNames_1.return)) _a.call(fileNames_1); } finally { if (e_1) throw e_1.error; } } var getExtension = compilerOptions.jsx === ts.JsxEmit.Preserve ? function (path) { return (/\.[tj]sx$/.test(path) ? '.jsx' : '.js'); } : function (_) { return '.js'; }; var compilerInstance; if (!tsJest.isolatedModules) { compilerInstance = !tsJest.compilerHost ? language_service_1.compileUsingLanguageService(configs, logger, memoryCache) : program_1.compileUsingProgram(configs, logger, memoryCache); } else { compilerInstance = transpile_module_1.compileUsingTranspileModule(configs, logger); } var compile = readThrough(cachedir, memoryCache, compilerInstance.compileFn, getExtension, logger); return { cwd: configs.cwd, compile: compile, program: compilerInstance.program }; };
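/*
 * Hypothetical usage sketch (not part of this module). The shape of `configs`
 * below is inferred only from what this file reads: a logger supporting
 * `.child()`/`.debug()`, the parsed TypeScript options and root file names,
 * the ts-jest flags, an optional cache directory, the `typescript` module
 * itself, and the working directory. The real ts-jest ConfigSet supplies more
 * than is shown here, so treat the property values as assumptions.
 *
 *   var ts = require('typescript');
 *   // assuming this file is required under some local path
 *   var createCompiler = require('./path/to/this/file').createCompiler;
 *   var compiler = createCompiler({
 *     logger: someLogger,                    // assumed bunyan-style child logger
 *     typescript: { options: {}, fileNames: ['src/index.ts'] },
 *     tsJest: { isolatedModules: true },     // true => transpileModule path
 *     tsCacheDir: undefined,                 // undefined => memory cache only
 *     compilerModule: ts,
 *     cwd: process.cwd(),
 *   });
 *   var js = compiler.compile(sourceText, 'src/index.ts');
 */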