
Merge branch 'master' of https://github.com/juanfranblanco/vscode-solidity

Juan Blanco
commit b7f5278d4b

File diff suppressed because it is too large
+ 3 - 3
README-DEVELOP.md


+ 11 - 2
package.json

@@ -55,24 +55,33 @@
     "ajv": "^6.5.2",
     "ajv-keywords": "^3.2.0",
     "armlet": "^2.0.0",
+    "find-cache-dir": "^2.1.0",
     "fs-extra": "^4.0.3",
     "handlebars": "^4.0.12",
     "nethereum-codegen": "^1.0.6",
+    "ora": "^3.1.0",
     "read-yaml": "^1.1.0",
     "solc": "^0.5.6",
+    "require-from-string": "^2.0.2",
     "solhint": "^1.4.0",
     "solium": "^1.2.3",
+    "request-promise": "^4.2.2",
     "solparse": "^2.2.8",
-    "truffle-config": "^1.1.6",
-    "truffle-workflow-compile": "2.0.8",
+    "truffle-artifactor": "^4.0.3",
+    "truffle-config": "^1.1.3",
+    "truffle-contract-sources": "^0.1.2",
+    "truffle-external-compile": "^1.0.4",
+    "truffle-resolver": "^5.0.4",
     "vscode-languageclient": "^5.1.1",
     "vscode-languageserver": "^5.1.0",
     "vscode-uri": "^1.0.3"
   },
   "devDependencies": {
     "@types/assert": "^1.4.1",
+    "@types/debug": "^4.1.2",
     "@types/mocha": "^5.2.5",
     "@types/node": "^10.5.5",
+    "@types/ora": "^3.2.0",
     "electron-rebuild": "^1.8.2",
     "mocha": "^5.2.0",
     "mock-require": "^3.0.3",

src/analysis/mythx/astWalker.ts → src/analysis/mythx/compat/remix-lib/astWalker.ts


+ 1 - 1
src/analysis/mythx/sourceMappingDecoder.ts

@@ -7,7 +7,7 @@
 ***/
 
 import { AstWalker } from './astWalker';
-import {findLowerBound} from './srcmap';
+import {findLowerBound} from '../../srcmap';
 
 /**
  * Decompress the source mapping given by solc-bin.js

+ 15 - 0
src/analysis/mythx/compat/truffle-compile/compileerror.ts

@@ -0,0 +1,15 @@
+import * as colors from 'colors';
+import * as TruffleError from 'truffle-error';
+
+
+export default class CompileError extends TruffleError {
+  public message: string;
+
+  constructor(message: string) {
+    const fancy_message = message.trim() + '\n' + colors.red('Compilation failed. See above.');
+    const normal_message = message.trim();
+
+    super(normal_message);
+    this.message = fancy_message;
+  }
+}
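
As a usage sketch (the error text below is illustrative), the class keeps the plain message for the Error superclass while exposing the colorized variant on `message`:

import CompileError from './compileerror';

try {
  // Illustrative solc output; any formatted compiler message works here.
  throw new CompileError('ParserError: Expected ";" but got identifier');
} catch (err) {
  // Prints the message plus the red 'Compilation failed. See above.' suffix.
  console.error((err as CompileError).message);
}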

+ 95 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/index.ts

@@ -0,0 +1,95 @@
+import * as path from 'path';
+import * as fs from 'fs';
+import * as semver from 'semver';
+import Bundled from './loadingStrategies/Bundled';
+import Docker from './loadingStrategies/Docker';
+import Native from './loadingStrategies/Native';
+import Local from './loadingStrategies/Local';
+import VersionRange from './loadingStrategies/VersionRange';
+
+
+export default class CompilerSupplier {
+  private config: any;
+  private strategyOptions: any;
+
+  constructor(_config) {
+    _config = _config || {};
+    const defaultConfig = { version: null };
+    this.config = Object.assign({}, defaultConfig, _config);
+    this.strategyOptions = { version: this.config.version };
+  }
+
+  public badInputError(userSpecification) {
+    const message =
+      `Could not find a compiler version matching ${userSpecification}. ` +
+      `compilers.solc.version option must be a string specifying:\n` +
+      `   - a path to a locally installed solcjs\n` +
+      `   - a solc version or range (ex: '0.4.22' or '^0.5.0')\n` +
+      `   - a docker image name (ex: 'stable')\n` +
+      `   - 'native' to use natively installed solc\n`;
+    return new Error(message);
+  }
+
+  public load() {
+    const userSpecification = this.config.version;
+
+    return new Promise(async (resolve, reject) => {
+      let strategy;
+      const useDocker = this.config.docker;
+      const useNative = userSpecification === 'native';
+      const useBundledSolc = !userSpecification;
+      const useSpecifiedLocal =
+        userSpecification && this.fileExists(userSpecification);
+      const isValidVersionRange = semver.validRange(userSpecification);
+
+      if (useDocker) {
+        strategy = new Docker(this.strategyOptions);
+      } else if (useNative) {
+        strategy = new Native(this.strategyOptions);
+      } else if (useBundledSolc) {
+        strategy = new Bundled(this.strategyOptions);
+      } else if (useSpecifiedLocal) {
+        strategy = new Local(this.strategyOptions);
+      } else if (isValidVersionRange) {
+        strategy = new VersionRange(this.strategyOptions);
+      }
+
+      if (strategy) {
+        try {
+          const solc = await strategy.load(userSpecification);
+          resolve(solc);
+        } catch (error) {
+          reject(error);
+        }
+      } else {
+        reject(this.badInputError(userSpecification));
+      }
+    });
+  }
+
+  public fileExists(localPath) {
+    return fs.existsSync(localPath) || path.isAbsolute(localPath);
+  }
+
+  public getDockerTags() {
+    return new Docker(this.strategyOptions).getDockerTags();
+  }
+
+  public getReleases() {
+    return new VersionRange(this.strategyOptions)
+      .getSolcVersions()
+      .then(list => {
+        const prereleases = list.builds
+          .filter(build => build['prerelease'])
+          .map(build => build['longVersion']);
+
+        const releases = Object.keys(list.releases);
+
+        return {
+          prereleases: prereleases,
+          releases: releases,
+          latestRelease: list.latestRelease,
+        };
+      });
+  }
+}
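
To show how the branches above select a strategy, a minimal sketch (the version string is an assumption; a local path, docker tag, or 'native' would pick a different strategy):

import CompilerSupplier from './compilerSupplier';

// '^0.5.0' is a valid semver range, so load() picks the VersionRange strategy;
// omitting `version` would fall through to the bundled solc.
const supplier = new CompilerSupplier({ version: '^0.5.0' });

supplier
  .load()
  .then((solc: any) => console.log('loaded solc', solc.version()))
  .catch(err => console.error(err.message));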

+ 13 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/Bundled.ts

@@ -0,0 +1,13 @@
+import LoadingStrategy from './LoadingStrategy';
+
+
+export default class Bundled extends LoadingStrategy {
+  public load() {
+    return this.getBundledSolc();
+  }
+
+  public getBundledSolc() {
+    this.removeListener();
+    return require('solc');
+  }
+}

+ 102 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/Docker.ts

@@ -0,0 +1,102 @@
+import * as request from 'request-promise';
+import * as fs from 'fs';
+import { execSync } from 'child_process';
+// import * as ora from 'ora';
+import * as semver from 'semver';
+import LoadingStrategy from './LoadingStrategy';
+import VersionRange from './VersionRange';
+
+
+export default class Docker extends LoadingStrategy {
+  public async load() {
+    const versionString = await this.validateAndGetSolcVersion();
+    const command =
+      'docker run -i ethereum/solc:' + this.config.version + ' --standard-json';
+
+    const versionRange = new VersionRange();
+    const commit = versionRange.getCommitFromVersion(versionString);
+
+    return versionRange
+      .getSolcByCommit(commit)
+      .then(solcjs => {
+        return {
+          compile: options => String(execSync(command, { input: options })),
+          importsParser: solcjs,
+          version: () => versionString,
+        };
+      })
+      .catch(error => {
+        if (error.message === 'No matching version found') {
+          throw this.errors('noVersion', versionString);
+        }
+        throw new Error(error);
+      });
+  }
+
+  public getDockerTags() {
+    return request(this.config.dockerTagsUrl)
+      .then(list => JSON.parse(list).results.map(item => item.name))
+      .catch(error => {
+        throw this.errors('noRequest', this.config.dockerTagsUrl, error);
+      });
+  }
+
+  public downloadDockerImage(image) {
+    if (!semver.valid(image)) {
+      const message =
+        `The image version you have provided is not valid.\n` +
+        `Please ensure that ${image} is a valid docker image name.`;
+      throw new Error(message);
+    }
+    // const spinner = ora({
+    //   color: 'red',
+    //   text: 'Downloading Docker image',
+    // }).start();
+    try {
+      execSync(`docker pull ethereum/solc:${image}`);
+      // spinner.stop();
+    } catch (error) {
+      // spinner.stop();
+      throw new Error(error);
+    }
+  }
+
+  public async validateAndGetSolcVersion() {
+    const image = this.config.version;
+    const fileName = image + '.version';
+
+    // Skip validation if they've validated for this image before.
+    if (this.fileIsCached(fileName)) {
+      const cachePath = this.resolveCache(fileName);
+      return fs.readFileSync(cachePath, 'utf-8');
+    }
+    // No image specified
+    if (!image) {
+      throw this.errors('noString', image);
+    }
+
+    // Docker exists locally
+    try {
+      execSync('docker -v');
+    } catch (error) {
+      throw this.errors('noDocker');
+    }
+
+    // Image exists locally
+    try {
+      execSync('docker inspect --type=image ethereum/solc:' + image);
+    } catch (error) {
+      console.log(`${image} does not exist locally.\n`);
+      console.log('Attempting to download the Docker image.');
+      this.downloadDockerImage(image);
+    }
+
+    // Get version & cache.
+    const version = execSync(
+      'docker run ethereum/solc:' + image + ' --version',
+    );
+    const normalized = new VersionRange().normalizeSolcVersion(version);
+    this.addFileToCache(normalized, fileName);
+    return normalized;
+  }
+}
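
The shim resolved by `load()` above compiles by piping standard JSON through `docker run`, while `importsParser` is a solcjs build for the same commit. A hedged sketch of driving it (assumes Docker is installed and the `0.5.6` tag exists):

import Docker from './loadingStrategies/Docker';

async function compileViaDocker() {
  const solcShim: any = await new Docker({ version: '0.5.6' }).load();

  const input = JSON.stringify({
    language: 'Solidity',
    sources: { 'A.sol': { content: 'pragma solidity 0.5.6; contract A {}' } },
    settings: { outputSelection: { '*': { '*': ['evm.bytecode.object'] } } },
  });

  // compile() shells out synchronously: `docker run -i ethereum/solc:0.5.6 --standard-json`.
  const output = JSON.parse(solcShim.compile(input));
  console.log(Object.keys(output.contracts['A.sol'])); // ['A']
}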

+ 97 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/LoadingStrategy.ts

@@ -0,0 +1,97 @@
+import * as findCacheDir from 'find-cache-dir';
+import * as fs from 'fs';
+
+
+export default class LoadingStrategy {
+  public config: any;
+  public cachePath: any;
+
+  constructor(options = {}) {
+    const defaultConfig = {
+      versionsUrl: "https://solc-bin.ethereum.org/bin/list.json",
+      compilerUrlRoot: "https://solc-bin.ethereum.org/bin/",
+      dockerTagsUrl:
+        "https://registry.hub.docker.com/v2/repositories/ethereum/solc/tags/"
+    };
+    this.config = Object.assign({}, defaultConfig, options);
+    this.cachePath = findCacheDir({
+      name: "truffle",
+      cwd: __dirname,
+      create: true
+    });
+  }
+
+  public addFileToCache(code, fileName) {
+    const filePath = this.resolveCache(fileName);
+    fs.writeFileSync(filePath, code);
+  }
+
+  public errors(kind, input = '', error = '') {
+    const info = "Run `truffle compile --list` to see available versions.";
+
+    const kinds = {
+      noPath: "Could not find compiler at: " + input,
+      noVersion:
+        `Could not find a compiler version matching ${input}. ` +
+        `Please ensure you are specifying a valid version, constraint or ` +
+        `build in the truffle config. ${info}`,
+      noRequest:
+        "Failed to complete request to: " +
+        input +
+        ". Are you connected to the internet?\n\n" +
+        error,
+      noDocker:
+        "You are trying to run dockerized solc, but docker is not installed.",
+      noImage:
+        "Please pull " +
+        input +
+        " from docker before trying to compile with it.",
+      noNative: "Could not execute local solc binary: " + error,
+      noString:
+        "`compilers.solc.version` option must be a string specifying:\n" +
+        "   - a path to a locally installed solcjs\n" +
+        "   - a solc version or range (ex: '0.4.22' or '^0.5.0')\n" +
+        "   - a docker image name (ex: 'stable')\n" +
+        "   - 'native' to use natively installed solc\n" +
+        "Received: " +
+        input +
+        " instead."
+    };
+
+    return new Error(kinds[kind]);
+  }
+
+  public fileIsCached(fileName) {
+    const file = this.resolveCache(fileName);
+    return fs.existsSync(file);
+  }
+
+  public load(_userSpecification) {
+    throw new Error(
+      "Abstract method LoadingStrategy.load is not implemented for this strategy."
+    );
+  }
+
+  /**
+   * Cleans up the 'uncaughtException' listener apparently installed by solc
+   * when it is required. (Inherited from the previous implementation.)
+   */
+  public removeListener() {
+    const listeners = process.listeners("uncaughtException");
+    const exceptionHandler = listeners[listeners.length - 1];
+
+    if (exceptionHandler) {
+      process.removeListener("uncaughtException", exceptionHandler);
+    }
+  }
+
+  public resolveCache(fileName) {
+    const thunk = findCacheDir({
+      name: "truffle",
+      cwd: __dirname,
+      thunk: true
+    });
+    return thunk(fileName);
+  }
+}
+
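
The cache helpers above all funnel through find-cache-dir, so compiler downloads land in a shared node_modules/.cache/truffle directory. A small round-trip sketch (the file name is illustrative):

import LoadingStrategy from './LoadingStrategy';

const strategy = new LoadingStrategy();
const fileName = 'soljson-v0.5.6+commit.b259423e.js'; // illustrative cache entry

if (!strategy.fileIsCached(fileName)) {
  // Writes to <cachePath>/<fileName>, creating the cache directory if needed.
  strategy.addFileToCache('// downloaded compiler source', fileName);
}
console.log(strategy.resolveCache(fileName)); // absolute path inside the cache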

+ 24 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/Local.ts

@@ -0,0 +1,24 @@
+import * as path from 'path';
+import * as originalRequire from 'original-require';
+import LoadingStrategy from './LoadingStrategy';
+
+export default class Local extends LoadingStrategy {
+  public load(localPath) {
+    return this.getLocalCompiler(localPath);
+  }
+
+  public getLocalCompiler(localPath) {
+    let compiler, compilerPath;
+    compilerPath = path.isAbsolute(localPath)
+      ? localPath
+      : path.resolve(process.cwd(), localPath);
+
+    try {
+      compiler = originalRequire(compilerPath);
+      this.removeListener();
+    } catch (error) {
+      throw this.errors("noPath", localPath, error);
+    }
+    return compiler;
+  }
+}

+ 38 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/Native.ts

@@ -0,0 +1,38 @@
+import { execSync } from 'child_process';
+import LoadingStrategy from './LoadingStrategy';
+import VersionRange from './VersionRange';
+
+export default class Native extends LoadingStrategy {
+  public load() {
+    const versionString = this.validateAndGetSolcVersion();
+    const command = "solc --standard-json";
+
+    const versionRange = new VersionRange();
+    const commit = versionRange.getCommitFromVersion(versionString);
+    return versionRange
+      .getSolcByCommit(commit)
+      .then(solcjs => {
+        return {
+          compile: options => String(execSync(command, { input: options })),
+          version: () => versionString,
+          importsParser: solcjs
+        };
+      })
+      .catch(error => {
+        if (error.message === "No matching version found") {
+          throw this.errors("noVersion", versionString);
+        }
+        throw new Error(error);
+      });
+  }
+
+  public validateAndGetSolcVersion() {
+    let version;
+    try {
+      version = execSync("solc --version");
+    } catch (error) {
+      throw this.errors("noNative", null, error);
+    }
+    return new VersionRange().normalizeSolcVersion(version);
+  }
+}

+ 231 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/VersionRange.ts

@@ -0,0 +1,231 @@
+import * as Debug from 'debug';
+import * as requireFromString from 'require-from-string';
+import * as fs from 'fs';
+// import * as ora from 'ora';
+import * as originalRequire from 'original-require';
+import * as request from 'request-promise';
+import * as semver from 'semver';
+import * as solcWrap from 'solc/wrapper';
+import LoadingStrategy from './LoadingStrategy';
+
+const debug = Debug('compile:compilerSupplier');
+
+
+export default class VersionRange extends LoadingStrategy {
+  public compilerFromString(code) {
+    const soljson = requireFromString(code);
+    const wrapped = solcWrap(soljson);
+    this.removeListener();
+    return wrapped;
+  }
+
+  public findNewestValidVersion(version, allVersions) {
+    if (!semver.validRange(version)) {
+      return null;
+    }
+    const satisfyingVersions = Object.keys(allVersions.releases)
+      .map(solcVersion => {
+        if (semver.satisfies(solcVersion, version)) {
+          return solcVersion;
+        }
+      })
+      .filter(solcVersion => solcVersion);
+    if (satisfyingVersions.length > 0) {
+      return satisfyingVersions.reduce((newestVersion, v) => {
+        return semver.gtr(v, newestVersion) ? v : newestVersion;
+      }, '0.0.0');
+    } else {
+      return null;
+    }
+  }
+
+  public getCachedSolcByFileName(fileName) {
+    const filePath = this.resolveCache(fileName);
+    const soljson = originalRequire(filePath);
+    debug('soljson %o', soljson);
+    const wrapped = solcWrap(soljson);
+    this.removeListener();
+    return wrapped;
+  }
+
+  // Range can also be a single version specification like "0.5.0"
+  public getCachedSolcByVersionRange(version) {
+    const cachedCompilerFileNames = fs.readdirSync(this.cachePath);
+    const validVersions = cachedCompilerFileNames.filter(fileName => {
+      const match = fileName.match(/v\d+\.\d+\.\d+.*/);
+      if (match) {
+        return semver.satisfies(match[0], version);
+      }
+    });
+
+    const multipleValidVersions = validVersions.length > 1;
+    const compilerFileName = multipleValidVersions
+      ? this.getMostRecentVersionOfCompiler(validVersions)
+      : validVersions[0];
+    return this.getCachedSolcByFileName(compilerFileName);
+  }
+
+  public getCachedSolcFileName(commit) {
+    const cachedCompilerFileNames = fs.readdirSync(this.cachePath);
+    return cachedCompilerFileNames.find(fileName => {
+      return fileName.includes(commit);
+    });
+  }
+
+  public getCommitFromVersion(versionString) {
+    return 'commit.' + versionString.match(/commit\.(.*?)\./)[1];
+  }
+
+  public getMostRecentVersionOfCompiler(versions) {
+    return versions.reduce((mostRecentVersionFileName, fileName) => {
+      const match = fileName.match(/v\d+\.\d+\.\d+.*/);
+      const mostRecentVersionMatch = mostRecentVersionFileName.match(
+        /v\d+\.\d+\.\d+.*/,
+      );
+      return semver.gtr(match[0], mostRecentVersionMatch[0])
+        ? fileName
+        : mostRecentVersionFileName;
+    }, '-v0.0.0+commit');
+  }
+
+  public getSatisfyingVersionFromCache(versionRange) {
+    if (this.versionIsCached(versionRange)) {
+      return this.getCachedSolcByVersionRange(versionRange);
+    }
+    throw this.errors('noVersion', versionRange);
+  }
+
+  public async getSolcByCommit(commit) {
+    const solcFileName = this.getCachedSolcFileName(commit);
+    if (solcFileName) {
+      return this.getCachedSolcByFileName(solcFileName);
+    }
+
+    const allVersions = await this.getSolcVersions();
+    const fileName = this.getSolcVersionFileName(commit, allVersions);
+
+    if (!fileName) {
+      throw new Error('No matching version found');
+    }
+
+    return this.getSolcByUrlAndCache(fileName);
+  }
+
+  public async getSolcByUrlAndCache(fileName) {
+    const url = this.config.compilerUrlRoot + fileName;
+    // const spinner = ora({
+    //   color: 'red',
+    //   text: 'Downloading compiler',
+    // }).start();
+    try {
+      const response = await request.get(url);
+      // spinner.stop();
+      this.addFileToCache(response, fileName);
+      return this.compilerFromString(response);
+    } catch (error) {
+      // spinner.stop();
+      throw this.errors('noRequest', url, error);
+    }
+  }
+
+  public async getSolcFromCacheOrUrl(version) {
+    let allVersions;
+    try {
+      allVersions = await this.getSolcVersions();
+    } catch (error) {
+      throw this.errors('noRequest', version, error);
+    }
+
+    const fileName = this.getSolcVersionFileName(version, allVersions);
+    if (!fileName) {
+      throw this.errors('noVersion', version);
+    }
+
+    if (this.fileIsCached(fileName)) {
+      return this.getCachedSolcByFileName(fileName);
+    }
+
+    return this.getSolcByUrlAndCache(fileName);
+  }
+
+  public getSolcVersions() {
+    // const spinner = ora({
+    //   color: 'yellow',
+    //   text: 'Fetching solc version list from solc-bin',
+    // }).start();
+
+    return request(this.config.versionsUrl)
+      .then(list => {
+        // spinner.stop();
+        return JSON.parse(list);
+      })
+      .catch(err => {
+        // spinner.stop();
+        throw this.errors('noRequest', this.config.versionsUrl, err);
+      });
+  }
+
+  public getSolcVersionFileName(version, allVersions) {
+    if (allVersions.releases[version]) {
+      return allVersions.releases[version];
+    }
+
+    const isPrerelease =
+      version.includes('nightly') || version.includes('commit');
+
+    if (isPrerelease) {
+      for (const build of allVersions.builds) {
+        const exists =
+          build['prerelease'] === version ||
+          build['build'] === version ||
+          build['longVersion'] === version;
+
+        if (exists) {
+          return build['path'];
+        }
+      }
+    }
+
+    const versionToUse = this.findNewestValidVersion(version, allVersions);
+
+    if (versionToUse) {
+      return allVersions.releases[versionToUse];
+    }
+
+    return null;
+  }
+
+  public async load(versionRange) {
+    const rangeIsSingleVersion = semver.valid(versionRange);
+    if (rangeIsSingleVersion && this.versionIsCached(versionRange)) {
+      return this.getCachedSolcByVersionRange(versionRange);
+    }
+
+    try {
+      return await this.getSolcFromCacheOrUrl(versionRange);
+    } catch (error) {
+      if (error.message.includes('Failed to complete request')) {
+        return this.getSatisfyingVersionFromCache(versionRange);
+      }
+      throw new Error(error);
+    }
+  }
+
+  public normalizeSolcVersion(input) {
+    const version = String(input);
+    return version.split(':')[1].trim();
+  }
+
+  public versionIsCached(version) {
+    const cachedCompilerFileNames = fs.readdirSync(this.cachePath);
+    const cachedVersions = cachedCompilerFileNames.map(fileName => {
+      const match = fileName.match(/v\d+\.\d+\.\d+.*/);
+      if (match) {
+        return match[0];
+      }
+    });
+    return cachedVersions.find(cachedVersion =>
+      semver.satisfies(cachedVersion, version),
+    );
+  }
+}
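
Putting the pieces together, `load()` above prefers an exact cached version, otherwise consults solc-bin's list.json, and on network failure falls back to any cached build satisfying the range. A hedged sketch:

import VersionRange from './VersionRange';

async function loadSolc(range: string) {
  const solc: any = await new VersionRange().load(range);
  console.log('using solc', solc.version());
  return solc;
}

// '^0.5.0' is illustrative; an exact version like '0.5.6' short-circuits to the cache.
loadSolc('^0.5.0').catch(err => console.error(err.message));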

+ 15 - 0
src/analysis/mythx/compat/truffle-compile/compilerSupplier/loadingStrategies/index.ts

@@ -0,0 +1,15 @@
+import Bundled from './Bundled';
+import Docker from './Docker';
+import LoadingStrategy from './LoadingStrategy';
+import Local from './Local';
+import Native from './Native';
+import VersionRange from './VersionRange';
+
+export default {
+  Bundled,
+  Docker,
+  LoadingStrategy,
+  Local,
+  Native,
+  VersionRange,
+};

+ 488 - 0
src/analysis/mythx/compat/truffle-compile/index.ts

@@ -0,0 +1,488 @@
+import * as assert from 'assert';
+import * as fs from 'fs';
+import * as OS from 'os';
+import * as path from 'path';
+import * as Profiler from './profiler';
+import * as expect from 'truffle-expect';
+import * as find_contracts from 'truffle-contract-sources';
+import * as Config from 'truffle-config';
+import * as Debug from 'debug';
+import CompileError from './compileerror';
+import CompilerSupplier from './compilerSupplier';
+
+const debug = Debug('compile'); // eslint-disable-line no-unused-vars
+
+
+function getFileContent(filepath: string) {
+  const stats: any = fs.statSync(filepath);
+  if (stats.isFile()) {
+    return fs.readFileSync(filepath).toString();
+  } else {
+    throw new Error(`File ${filepath} not found`);
+  }
+}
+
+function findImports(pathName: string) {
+  try {
+    return { contents: getFileContent(pathName) };
+  } catch (e) {
+    return { error: e.message };
+  }
+}
+
+const getSourceFileName = sourcePath => {
+  let shortName = path.basename(sourcePath);
+  if (shortName.endsWith('.sol')) {
+    shortName = shortName.slice(0, -4);
+  }
+  return shortName;
+};
+
+function sourcePath2BuildPath(sourcePath, buildDir) {
+  const shortName = getSourceFileName(sourcePath);
+  return path.join(buildDir, shortName + '.json');
+}
+
+// Returns true if the build artifact is missing
+// or older than its source file.
+function staleBuildContract(sourcePath, buildPath) {
+    let sourcePathStat, buildPathStat;
+    try {
+        sourcePathStat = fs.statSync(sourcePath);
+    } catch (err) {
+        return true;
+    }
+    try {
+        buildPathStat = fs.statSync(buildPath);
+    } catch (err) {
+        return true;
+    }
+
+    const sourceMtime = sourcePathStat.mtime;
+    const buildMtime = buildPathStat.mtime;
+    return sourceMtime > buildMtime;
+}
+
+
+// Recent versions of truffle seem to add __ to the end of the bytecode
+const cleanBytecode = bytecode => {
+  let cleanedBytecode = bytecode.replace(/_.+$/, '');
+  cleanedBytecode = `0x${cleanedBytecode}`;
+  return cleanedBytecode;
+};
+
+
+const normalizeJsonOutput = jsonObject => {
+  const { contracts, sources, compiler, updatedAt } = jsonObject;
+  const result = {
+    compiler,
+    sources: {},
+    updatedAt,
+  };
+
+  for (const [ sourcePath, solData ] of Object.entries(contracts)) {
+      if (!result.sources[sourcePath]) {
+          result.sources[sourcePath] = {
+              // sourcePath,
+              contracts: [],
+          };
+      }
+      for (const [ contractName, contractData ] of Object.entries(solData)) {
+          const o = {
+              bytecode: cleanBytecode(contractData.evm.bytecode.object),
+              contractName,
+              deployedBytecode: cleanBytecode(contractData.evm.deployedBytecode.object),
+              deployedSourceMap: contractData.evm.deployedBytecode.sourceMap,
+              sourceMap: contractData.evm.bytecode.sourceMap,
+          };
+
+          result.sources[sourcePath].contracts.push(o);
+      }
+  }
+
+  for (const entry of Object.entries(sources)) {
+    const sourcePath: any = entry[0];
+    const solData: any = entry[1];
+
+    if (!result.sources[sourcePath]) {
+      continue;
+    }
+    result.sources[sourcePath].ast = solData.ast;
+    result.sources[sourcePath].legacyAST = solData.legacyAST;
+    result.sources[sourcePath].id = solData.id;
+    result.sources[sourcePath].source = getFileContent(sourcePath);
+  }
+
+  return result;
+};
+
+// Most basic of the compile commands. Takes a sources object, where
+// the keys are file or module paths and the values are the bodies of
+// the contracts. Does not evaluate dependencies that aren't already given.
+//
+// Default options:
+// {
+//   strict: false,
+//   quiet: false,
+//   logger: console
+// }
+const compile = (sourcePath, sourceText, options, callback, isStale) => {
+  if (typeof options === "function") {
+    callback = options;
+    options = {};
+  }
+
+  if (options.logger === undefined) options.logger = console;
+
+  const hasTargets =
+    options.compilationTargets && options.compilationTargets.length;
+
+  expect.options(options, ["contracts_directory", "compilers"]);
+
+  expect.options(options.compilers, ["solc"]);
+
+  options.compilers.solc.settings.evmVersion =
+    options.compilers.solc.settings.evmVersion ||
+    options.compilers.solc.evmVersion ||
+    {};
+  options.compilers.solc.settings.optimizer =
+    options.compilers.solc.settings.optimizer ||
+    options.compilers.solc.optimizer ||
+    {};
+
+  // Ensure sources have operating system independent paths
+  // i.e., convert backslashes to forward slashes; things like C: are left intact.
+  const operatingSystemIndependentSources = {};
+  const operatingSystemIndependentTargets = {};
+  const originalPathMappings = {};
+
+  const defaultSelectors = {
+    "": ["legacyAST", "ast"],
+    "*": [
+      "abi",
+      "evm.bytecode.object",
+      "evm.bytecode.sourceMap",
+      "evm.deployedBytecode.object",
+      "evm.deployedBytecode.sourceMap",
+      "userdoc",
+      "devdoc"
+    ]
+  };
+
+  // Specify compilation targets
+  // Each target uses defaultSelectors, defaulting to single target `*` if targets are unspecified
+  const outputSelection = {};
+  const targets = operatingSystemIndependentTargets;
+  const targetPaths = Object.keys(targets);
+
+  targetPaths.length
+    ? targetPaths.forEach(key => (outputSelection[key] = defaultSelectors))
+    : (outputSelection["*"] = defaultSelectors);
+
+  const solcStandardInput = {
+    language: "Solidity",
+    sources: {},
+    settings: {
+      evmVersion: options.compilers.solc.settings.evmVersion,
+      optimizer: options.compilers.solc.settings.optimizer,
+      outputSelection
+    }
+  };
+
+  // Load solc module only when compilation is actually required.
+  const supplier = new CompilerSupplier(options.compilers.solc);
+
+  supplier
+    .load()
+    .then((solc: any) => {
+
+      const solcVersion = solc.version();
+      solcStandardInput.sources = {
+        [sourcePath]: {
+          content: sourceText
+        },
+      };
+
+      const result = solc.compile(JSON.stringify(solcStandardInput), findImports);
+
+      const standardOutput = JSON.parse(result);
+
+      let errors = standardOutput.errors || [];
+      let warnings = [];
+
+      if (options.strict !== true) {
+        warnings = errors.filter(function(error) {
+          return error.severity === "warning";
+        });
+
+        errors = errors.filter(function(error) {
+          return error.severity !== "warning";
+        });
+
+        if (options.quiet !== true && warnings.length > 0) {
+          options.logger.log(
+            OS.EOL + "Compilation warnings encountered:" + OS.EOL
+          );
+          options.logger.log(
+            warnings
+              .map(function(warning) {
+                return warning.formattedMessage;
+              })
+              .join(),
+          );
+        }
+      }
+
+      if (errors.length > 0) {
+        options.logger.log("");
+        return callback(
+          new CompileError(
+            standardOutput.errors
+              .map(function(error) {
+                return error.formattedMessage;
+              })
+              .join()
+          )
+        );
+      }
+
+      standardOutput.compiler = {
+        name: "solc",
+        version: solcVersion
+      };
+      standardOutput.source = sourceText;
+      standardOutput.updatedAt = new Date();
+
+      const normalizedOutput = normalizeJsonOutput(standardOutput);
+
+      // FIXME: the below return path is hokey, because it is in the format that
+      // the multiPromisify'd caller in workflow-compile expects.
+      const shortName = getSourceFileName(sourcePath);
+
+      callback(null, {[shortName]: normalizedOutput}, isStale);
+    })
+    .catch(callback);
+};
+
+/** From original truffle-compile. This is not used yet.
+ */
+function replaceLinkReferences(bytecode, linkReferences, libraryName) {
+  let linkId = "__" + libraryName;
+
+  while (linkId.length < 40) {
+    linkId += "_";
+  }
+
+  linkReferences.forEach(function(ref) {
+    // ref.start is a byte offset. Convert it to character offset.
+    const start = ref.start * 2 + 2;
+
+    bytecode =
+      bytecode.substring(0, start) + linkId + bytecode.substring(start + 40);
+  });
+
+  return bytecode;
+}
+
+/** From original truffle-compile. This is not used yet.
+ */
+function orderABI(contract) {
+  let contract_definition;
+  const ordered_function_names = [];
+
+  for (let i = 0; i < contract.legacyAST.children.length; i++) {
+    const definition = contract.legacyAST.children[i];
+
+    // AST can have multiple contract definitions, make sure we have the
+    // one that matches our contract
+    if (
+      definition.name !== "ContractDefinition" ||
+      definition.attributes.name !== contract.contract_name
+    ) {
+      continue;
+    }
+
+    contract_definition = definition;
+    break;
+  }
+
+  if (!contract_definition) {
+    return contract.abi;
+  }
+  if (!contract_definition.children) {
+    return contract.abi;
+  }
+
+  contract_definition.children.forEach(function(child) {
+    if (child.name === "FunctionDefinition") {
+      ordered_function_names.push(child.attributes.name);
+    }
+  });
+
+  // Put function names in a hash with their order, lowest first, for speed.
+  const functions_to_remove = ordered_function_names.reduce(function(
+    obj,
+    value,
+    index,
+  ) {
+    obj[value] = index;
+    return obj;
+  },
+  {});
+
+  // Filter out functions from the abi
+  let function_definitions: any = contract.abi.filter(function(item) {
+    return functions_to_remove[item.name] !== undefined;
+  });
+
+  // Sort removed function definitions
+  function_definitions = function_definitions.sort(function(item_a, item_b) {
+    const a = functions_to_remove[item_a.name];
+    const b = functions_to_remove[item_b.name];
+
+    if (a > b) {
+      return 1;
+    }
+    if (a < b) {
+      return -1;
+    }
+    return 0;
+  });
+
+  // Create a new ABI, placing ordered functions at the end.
+  const newABI = [];
+  contract.abi.forEach(function(item) {
+    if (functions_to_remove[item.name] !== undefined) {
+      return;
+    }
+    newABI.push(item);
+  });
+
+  // Now push the ordered function definitions onto the end of the ABI.
+  Array.prototype.push.apply(newABI, function_definitions);
+
+  return newABI;
+}
+
+// contracts_directory: String. Directory where .sol files can be found.
+// quiet: Boolean. Suppress output. Defaults to false.
+// strict: Boolean. Return compiler warnings as errors. Defaults to false.
+const all = function(options, callback) {
+  find_contracts(options.contracts_directory, function(err, files) {
+    if (err) {
+      return callback(err);
+    }
+
+    options.paths = files;
+    with_dependencies(options, callback, true);
+  });
+};
+
+// contracts_directory: String. Directory where .sol files can be found.
+// build_directory: String. Optional. Directory where .sol.js files can be found. Only required if `all` is false.
+// all: Boolean. Compile all sources found. Defaults to true. If false, will compare sources against built files
+//      in the build directory to see what needs to be compiled.
+// quiet: Boolean. Suppress output. Defaults to false.
+// strict: Boolean. Return compiler warnings as errors. Defaults to false.
+const necessary = function(options, callback) {
+  options.logger = options.logger || console;
+
+  Profiler.updated(options, function(err, updated) {
+    if (err) {
+      return callback(err);
+    }
+
+    if (updated.length === 0 && options.quiet !== true) {
+      return callback(null, [], {});
+    }
+
+    options.paths = updated;
+    with_dependencies(options, callback, false);
+  });
+};
+
+const with_dependencies = (options, callback, compileAll) => {
+  options.logger = options.logger || console;
+  options.contracts_directory = options.contracts_directory || process.cwd();
+
+  expect.options(options, [
+    "paths",
+    "working_directory",
+    "contracts_directory",
+    "resolver"
+  ]);
+
+  const config = Config.default().merge(options);
+
+  Profiler.required_sources(
+    config.with({
+      paths: options.paths,
+      base_path: options.contracts_directory,
+      resolver: options.resolver
+    }),
+    (err, allSources, required) => {
+      if (err) {
+        return callback(err);
+      }
+
+      // Filter out of the list of files to be compiled those for which we have a JSON that
+      // is newer than the last modified time of the source file.
+      const filteredRequired = [];
+      for (const sourcePath of options.paths) {
+        const targetJsonPath = sourcePath2BuildPath(sourcePath, options.build_mythx_contracts);
+        if (compileAll || staleBuildContract(sourcePath, targetJsonPath)) {
+          // Set for compilation
+          filteredRequired.push(sourcePath);
+        } else {
+          // Pick up from existing JSON
+          const buildJson = fs.readFileSync(targetJsonPath, 'utf8');
+          const buildObj = JSON.parse(buildJson);
+          const shortName = getSourceFileName(sourcePath);
+          callback(null, {[shortName]: buildObj}, false);
+          return;
+        }
+      }
+      const hasTargets = filteredRequired.length;
+
+      hasTargets
+        ? display(filteredRequired, options)
+        : display(allSources, options);
+
+      for (const sourcePath of filteredRequired) {
+        if (!sourcePath.endsWith('/Migrations.sol')) {
+          compile(sourcePath, allSources[sourcePath], options, callback, true);
+        }
+      }
+    });
+};
+
+const display = (paths, options) => {
+  if (options.quiet !== true) {
+    if (!Array.isArray(paths)) {
+      paths = Object.keys(paths);
+    }
+
+    const blacklistRegex = /^truffle\/|\/Migrations.sol$/;
+
+    paths.sort().forEach(contract => {
+      if (path.isAbsolute(contract)) {
+        contract =
+          "." + path.sep + path.relative(options.working_directory, contract);
+      }
+      if (contract.match(blacklistRegex)) {
+        return;
+      }
+      options.logger.log("Compiling " + contract + "...");
+    });
+  }
+};
+
+
+export default {
+  CompilerSupplier,
+  all,
+  compile,
+  display,
+  necessary,
+  with_dependencies,
+};
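
For orientation, a sketch of invoking the exported `compile` directly. The option names mirror what the function reads above; the paths and solc settings are illustrative, and an empty `settings` object is required because the function dereferences it unconditionally:

import * as fs from 'fs';
import truffleCompile from './index';

const sourcePath = '/project/contracts/A.sol'; // illustrative
const sourceText = fs.readFileSync(sourcePath, 'utf8');

const options: any = {
  contracts_directory: '/project/contracts',
  compilers: { solc: { version: '0.5.6', settings: {} } },
  quiet: true,
};

truffleCompile.compile(sourcePath, sourceText, options, (err, result, isStale) => {
  if (err) { return console.error(err.message); }
  // `result` maps the short source name ('A') to the normalized build object.
  console.log(Object.keys(result), 'stale:', isStale);
}, true);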

+ 92 - 0
src/analysis/mythx/compat/truffle-compile/parser.ts

@@ -0,0 +1,92 @@
+import * as Debug from 'debug';
+import CompileError from './compileerror';
+
+
+const debug = Debug("compile:parser"); // eslint-disable-line no-unused-vars
+// Warning issued by a pre-release compiler version, ignored by this component.
+const preReleaseCompilerWarning =
+  "This is a pre-release compiler version, please do not use it in production.";
+
+export const parseImports = (body, solc) => {
+  // WARNING: Kind of a hack (an expedient one).
+
+  // So we don't have to maintain a separate parser, we'll get all the imports
+  // in a file by sending the file to solc and evaluating the error messages
+  // to see what import statements couldn't be resolved. To prevent full-on
+  // compilation when a file has no import statements, we inject an import
+  // statement right on the end; just to ensure it will error and we can parse
+  // the imports speedily without doing extra work.
+
+  // If we're using docker/native, we'll still want to use solcjs to do this part.
+  if (solc.importsParser) {
+    solc = solc.importsParser;
+  }
+
+  // Helper to detect import errors with an easy regex.
+  const importErrorKey = "TRUFFLE_IMPORT";
+
+  // Inject failing import.
+  const failingImportFileName = "__Truffle__NotFound.sol";
+
+  body = body + "\n\nimport '" + failingImportFileName + "';\n";
+
+  const solcStandardInput = {
+    language: "Solidity",
+    sources: {
+      "ParsedContract.sol": {
+        content: body
+      }
+    },
+    settings: {
+      outputSelection: {
+        "ParsedContract.sol": {
+          "*": [] // We don't need any output.
+        }
+      }
+    }
+  };
+
+  let output = solc.compile(JSON.stringify(solcStandardInput), function() {
+    // The existence of this function ensures we get a parsable error message.
+    // Without this, we'll get an error message we *can* detect, but the key will make it easier.
+    // Note: This is not a normal callback. See docs here: https://github.com/ethereum/solc-js#from-version-021
+    return { error: importErrorKey };
+  });
+
+  output = JSON.parse(output);
+
+  // Filter out the "pre-release compiler" warning, if present.
+  const errors = output.errors.filter(function(solidity_error) {
+    return solidity_error.message.indexOf(preReleaseCompilerWarning) < 0;
+  });
+
+  const nonImportErrors = errors.filter(function(solidity_error) {
+    // If the import error key is not found, we must not have an import error.
+    // This means we have a *different* parsing error which we should show to the user.
+    // Note: solc can return multiple parsing errors at once.
+    // We ignore the "pre-release compiler" warning message.
+    return solidity_error.formattedMessage.indexOf(importErrorKey) < 0;
+  });
+
+  // Should we try to throw more than one? (aside; we didn't before)
+  if (nonImportErrors.length > 0) {
+    throw new CompileError(nonImportErrors[0].formattedMessage);
+  }
+
+  // Now, all errors must be import errors.
+  // Filter out our forced import, then get the import paths of the rest.
+  const imports = errors
+    .filter(function(solidity_error) {
+      return solidity_error.message.indexOf(failingImportFileName) < 0;
+    })
+    .map(function(solidity_error) {
+      const matches = solidity_error.formattedMessage.match(
+        /import[^'"]+("|')([^'"]+)("|')/
+      );
+
+      // Return the item between the quotes.
+      return matches[2];
+    });
+
+  return imports;
+};
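
A sketch of the trick in action, feeding a small contract through solcjs and recovering its imports from the synthesized errors (requiring 'solc' directly is an assumption; any compatible solcjs instance, or the `importsParser` of a docker/native shim, works):

import * as solc from 'solc';
import { parseImports } from './parser';

const body = `
pragma solidity ^0.5.0;
import "./Token.sol";
contract Sale {}
`;

// Every unresolved import errors out, so this returns ['./Token.sol']
// without performing a full compilation.
console.log(parseImports(body, solc));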

+ 423 - 0
src/analysis/mythx/compat/truffle-compile/profiler.ts

@@ -0,0 +1,423 @@
+// Compares .sol files to their .sol.js counterparts,
+// determines which .sol files have been updated.
+
+import * as path from 'path';
+import * as async from 'async';
+import * as fs from 'fs';
+import * as Parser from './parser';
+import CompilerSupplier from './compilerSupplier';
+import * as expect from 'truffle-expect';
+import * as find_contracts from 'truffle-contract-sources';
+import * as Debug from 'debug';
+
+
+const debug = Debug('compile:profile'); // eslint-disable-line no-unused-vars
+
+export const updated = (options, callback) => {
+  expect.options(options, ['resolver']);
+
+  const contracts_directory = options.contracts_directory;
+
+  const build_directory = options.build_mythx_contracts;
+
+  function getFiles(done) {
+    if (options.files) {
+      done(null, options.files);
+    } else {
+      find_contracts(contracts_directory, done);
+    }
+  }
+
+  const sourceFilesArtifacts = {};
+  const sourceFilesArtifactsUpdatedTimes = {};
+
+  const updatedFiles = [];
+
+  async.series(
+    [
+      // Get all the source files and create an object out of them.
+      function(c) {
+        getFiles(function(err, files) {
+          if (err) {
+            return c(err);
+          }
+
+          // Use an object for O(1) access.
+          files.forEach(function(sourceFile) {
+            sourceFilesArtifacts[sourceFile] = [];
+          });
+
+          c();
+        });
+      },
+      // Get all the artifact files, and read them, parsing them as JSON
+      function(c) {
+        fs.readdir(build_directory, function(err, build_files) {
+          if (err) {
+            // The build directory may not always exist.
+            if (
+              err.message.indexOf('ENOENT: no such file or directory') >= 0
+            ) {
+              // Ignore it.
+              build_files = [];
+            } else {
+              return c(err);
+            }
+          }
+
+          build_files = build_files.filter(function(build_file) {
+            return path.extname(build_file) === '.json';
+          });
+
+          async.map(
+            build_files,
+            function(buildFile, finished) {
+              fs.readFile(
+                path.join(build_directory, buildFile),
+                'utf8',
+                function(e, body) {
+                  if (e) {
+                    return finished(e);
+                  }
+                  finished(null, body);
+                },
+              );
+            },
+            function(e, jsonData) {
+              if (e) {
+                return c(e);
+              }
+
+              try {
+                for (let i = 0; i < jsonData.length; i++) {
+                  const data = JSON.parse(jsonData[i]);
+
+                  // In case there are artifacts from other source locations.
+                  if (sourceFilesArtifacts[data.sourcePath] == null) {
+                    sourceFilesArtifacts[data.sourcePath] = [];
+                  }
+
+                  sourceFilesArtifacts[data.sourcePath].push(data);
+                }
+              } catch (e) {
+                return c(e);
+              }
+
+              c();
+            },
+          );
+        });
+      },
+      function(c) {
+        // Get the minimum updated time for all of a source file's artifacts
+        // (note: one source file might have multiple artifacts).
+        Object.keys(sourceFilesArtifacts).forEach(function(sourceFile) {
+          const artifacts = sourceFilesArtifacts[sourceFile];
+
+          sourceFilesArtifactsUpdatedTimes[sourceFile] = artifacts.reduce(
+            function(minimum, current) {
+              const updatedAt = new Date(current.updatedAt).getTime();
+
+              if (updatedAt < minimum) {
+                return updatedAt;
+              }
+              return minimum;
+            },
+            Number.MAX_SAFE_INTEGER,
+          );
+
+          // No artifacts for this source file: treat it as never built.
+          if (
+            sourceFilesArtifactsUpdatedTimes[sourceFile] ===
+            Number.MAX_SAFE_INTEGER
+          ) {
+            sourceFilesArtifactsUpdatedTimes[sourceFile] = 0;
+          }
+        });
+
+        c();
+      },
+      // Stat all the source files, getting their updated times, and comparing them to
+      // the artifact updated times.
+      function(c) {
+        const sourceFiles = Object.keys(sourceFilesArtifacts);
+
+        async.map(
+          sourceFiles,
+          function(sourceFile, finished) {
+            fs.stat(sourceFile, function(err, stat) {
+              if (err) {
+                // Ignore it. This means the source file was removed
+                // but the artifact file possibly exists. Return null
+                // to signify that we should ignore it.
+                stat = null;
+              }
+              finished(null, stat);
+            });
+          },
+          function(err, sourceFileStats) {
+            if (err) {
+              return callback(err);
+            }
+
+            sourceFiles.forEach(function(sourceFile, index) {
+              const sourceFileStat = sourceFileStats[index];
+
+              // Ignore updating artifacts if source file has been removed.
+              if (sourceFileStat == null) {
+                return;
+              }
+
+              const artifactsUpdatedTime =
+                sourceFilesArtifactsUpdatedTimes[sourceFile] || 0;
+              const sourceFileUpdatedTime = (
+                sourceFileStat.mtime || sourceFileStat.ctime
+              ).getTime();
+
+              if (sourceFileUpdatedTime > artifactsUpdatedTime) {
+                updatedFiles.push(sourceFile);
+              }
+            });
+
+            c();
+          },
+        );
+      },
+    ],
+    function(err) {
+      callback(err, updatedFiles);
+    },
+  );
+};
+
+// Returns the minimal set of sources to pass to solc as compilation targets,
+// as well as the complete set of sources so solc can resolve the targets' imports.
+export const required_sources = (options, callback) => {
+  expect.options(options, ['paths', 'base_path', 'resolver']);
+
+  const resolver = options.resolver;
+
+  // Fetch the whole contract set
+  find_contracts(options.contracts_directory, (err, allPaths) => {
+    if (err) {
+      return callback(err);
+    }
+
+    // Solidity test files might have been injected. Include them in the known set.
+    options.paths.forEach(_path => {
+      if (!allPaths.includes(_path)) {
+        allPaths.push(_path);
+      }
+    });
+
+    const updates = convert_to_absolute_paths(options.paths, options.base_path)
+      .sort();
+    allPaths = convert_to_absolute_paths(allPaths, options.base_path)
+      .sort();
+
+    const allSources = {};
+    const compilationTargets = [];
+
+    // Load compiler
+    const supplier = new CompilerSupplier(options.compilers.solc);
+    supplier
+      .load()
+      .then(solc => {
+        // Get all the source code
+        resolveAllSources(resolver, allPaths, solc, (e, resolved) => {
+          if (e) {
+            return callback(e);
+          }
+
+          // Generate hash of all sources including external packages - passed to solc inputs.
+          const resolvedPaths = Object.keys(resolved);
+          resolvedPaths.forEach(
+            file => (allSources[file] = resolved[file].body),
+          );
+
+          // Exit w/out minimizing if we've been asked to compile everything, or nothing.
+          if (listsEqual(options.paths, allPaths)) {
+            return callback(null, allSources, {});
+          } else if (!options.paths.length) {
+            return callback(null, {}, {});
+          }
+
+          // Seed compilationTargets with known updates
+          updates.forEach(update => compilationTargets.push(update));
+
+          // While there are updated files in the queue, we take each one
+          // and search the entire file corpus to find any sources that import it.
+          // Those sources are added to list of compilation targets as well as
+          // the update queue because their own ancestors need to be discovered.
+          async.whilst(
+            () => updates.length > 0,
+            updateFinished => {
+              const currentUpdate = updates.shift();
+              const files = allPaths.slice();
+
+              // While files: dequeue and inspect their imports
+              async.whilst(
+                () => files.length > 0,
+                fileFinished => {
+                  const currentFile = files.shift();
+
+                  // Ignore targets already selected.
+                  if (compilationTargets.includes(currentFile)) {
+                    return fileFinished();
+                  }
+
+                  let imports;
+                  try {
+                    imports = getImports(
+                      currentFile,
+                      resolved[currentFile],
+                      solc,
+                    );
+                  } catch (err) {
+                    err.message =
+                      'Error parsing ' + currentFile + ': ' + err.message;
+                    return fileFinished(err);
+                  }
+
+                  // If file imports a compilation target, add it
+                  // to list of updates and compilation targets
+                  if (imports.includes(currentUpdate)) {
+                    updates.push(currentFile);
+                    compilationTargets.push(currentFile);
+                  }
+
+                  fileFinished();
+                },
+                error => {
+                  return updateFinished(error);
+                },
+              );
+            },
+            error => {
+                return error
+                  ? callback(error)
+                  : callback(null, allSources, compilationTargets);
+              },
+          );
+        });
+      })
+      .catch(callback);
+  });
+};
+
+// Resolves sources in several async passes. For each resolved set it detects unknown
+// imports from external packages and adds them to the set of files to resolve.
+export const resolveAllSources = (resolver, initialPaths, solc, callback) => {
+  const mapping = {};
+  const allPaths = initialPaths.slice();
+
+  function generateMapping(finished) {
+    const promises = [];
+
+    // Dequeue all the known paths, generating resolver promises,
+    // We'll add paths if we discover external package imports.
+    while (allPaths.length) {
+      let file;
+      let parent = null;
+
+      const candidate = allPaths.shift();
+
+      // Some paths will have been extracted as imports from a file
+      // and have information about their parent location we need to track.
+      if (typeof candidate === 'object') {
+        file = candidate.file;
+        parent = candidate.parent;
+      } else {
+        file = candidate;
+      }
+      const promise = new Promise((accept, reject) => {
+        resolver.resolve(file, parent, (err, body, absolutePath, source) => {
+          err
+            ? reject(err)
+            : accept({ file: absolutePath, body: body, source: source });
+        });
+      });
+      promises.push(promise);
+    }
+
+    // Resolve everything known and add it to the map, then inspect each file's
+    // imports and add those to the list of paths to resolve if we don't have it.
+    Promise.all(promises)
+      .then(results => {
+        // Generate the sources mapping
+        results.forEach(
+          item => (mapping[item.file] = Object.assign({}, item)),
+        );
+
+        // Queue unknown imports for the next resolver cycle
+        while (results.length) {
+          const result = results.shift();
+
+          // Inspect the imports
+          let imports;
+          try {
+            imports = getImports(result.file, result, solc);
+          } catch (err) {
+            err.message = 'Error parsing ' + result.file + ': ' + err.message;
+            return finished(err);
+          }
+
+          // Detect unknown external packages / add them to the list of files to resolve
+          // Keep track of location of this import because we need to report that.
+          imports.forEach(item => {
+            if (!mapping[item]) {
+              allPaths.push({ file: item, parent: result.file });
+            }
+          });
+        }
+      })
+      .catch(finished)
+      .then(finished);
+  }
+
+  async.whilst(
+    () => allPaths.length,
+    generateMapping,
+    err => (err ? callback(err) : callback(null, mapping)),
+  );
+};
+
+export const getImports = (file, resolved, solc) => {
+  const imports = Parser.parseImports(resolved.body, solc);
+
+  // Convert explicitly relative dependencies of modules back into module paths.
+  return imports.map(dependencyPath => {
+    return isExplicitlyRelative(dependencyPath)
+      ? resolved.source.resolve_dependency_path(file, dependencyPath)
+      : dependencyPath;
+  });
+};
+
+export const listsEqual = (listA, listB) => {
+  const a = listA.sort();
+  const b = listB.sort();
+
+  return JSON.stringify(a) === JSON.stringify(b);
+};
+
+export const convert_to_absolute_paths = (paths: any, base: any) => {
+  return paths.map(function(p) {
+    // If it's an absolute path, leave it alone.
+
+    if (path.isAbsolute(p)) {
+      return p;
+    }
+
+    // If it's not explicitly relative, then leave it alone (i.e., it's a module).
+    if (!isExplicitlyRelative(p)) {
+      return p;
+    }
+
+    // Path must be explicitly relative, therefore make it absolute.
+    return path.resolve(path.join(base, p));
+  });
+};
+
+export const isExplicitlyRelative = (import_path: any) => {
+  return import_path.indexOf('.') === 0;
+};
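
A sketch of how the two entry points compose; the directories are illustrative, and the resolver construction assumes truffle-resolver's config-object constructor:

import * as Resolver from 'truffle-resolver';
import * as profiler from './profiler';

const options: any = {
  contracts_directory: '/project/contracts',          // illustrative paths
  build_mythx_contracts: '/project/build/mythx/contracts',
  working_directory: '/project',
  compilers: { solc: { version: '0.5.6' } },
};
options.resolver = new Resolver(options);

// Find sources whose artifacts are missing or stale...
profiler.updated(options, (err, updatedFiles) => {
  if (err) { return console.error(err); }
  // ...then expand them into the minimal compilation target set, along with
  // every resolvable source so solc can satisfy imports.
  profiler.required_sources(
    { ...options, paths: updatedFiles, base_path: options.contracts_directory },
    (e, allSources, targets) => {
      if (e) { return console.error(e); }
      console.log(Object.keys(allSources).length, 'sources,', targets.length || 0, 'targets');
    },
  );
});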

+ 75 - 61
src/analysis/mythx/index.ts

@@ -14,7 +14,7 @@ import {SolcCompiler} from '../../solcCompiler';
 
 
 import * as Config from 'truffle-config';
-import { compile } from 'truffle-workflow-compile';
+import Contracts from './wfc';
 import * as stripAnsi from 'strip-ansi';
 
 
@@ -48,7 +48,7 @@ interface SolidityMythXOption {
 // const contractsCompile = util.promisify(contracts.compile);
 const contractsCompile = config => {
     return new Promise((resolve, reject) => {
-        compile(config, (err, result) => {
+        Contracts.compile(config, (err, result) => {
             if (err) {
                 reject(err);
                 return ;
@@ -127,6 +127,7 @@ function solidityPathAndSource() {
 
     return {
         buildContractsDir: trufstuf.getBuildContractsDir(rootDir),
+        buildMythxContractsDir: trufstuf.getBuildMythxContractsDir(rootDir),
         code: contractCode,
         path: contractPath,
         rootDir: rootDir,
@@ -262,22 +263,12 @@ async function analyzeWithBuildDir({
         return;
     }
 
-    const obj = new mythx.MythXIssues(buildObj);
-
-    const mythxBuilObj: any = obj.getBuildObj();
-    const analyzeOpts = {
-        clientToolName: 'vscode-solidity',
-        data: mythxBuilObj,
-        timeout: solidityConfig.mythx.timeout * 1000,  // convert secs to millisecs
-    };
-
-    analyzeOpts.data.analysisMode = solidityConfig.mythx.analysisMode;
-
-    const contractName: string = buildObj.contractName;
+    const contracts = mythx.newTruffleObjToOldTruffleByContracts(buildObj);
 
     const timeout = solidityConfig.mythx.timeout;
-    const progressStep = 100 / timeout;
+    const progressStep = 100 / (timeout * contracts.length);
     let progressBarcurrStep = 0;
+    let currentContract: string;
     let progressBarInterval = setInterval(() => {
         if (progressBarInterval && progressBarcurrStep >= 100) {
             clearInterval(progressBarInterval);
@@ -285,53 +276,64 @@ async function analyzeWithBuildDir({
             return ;
         }
         progressBarcurrStep += progressStep;
-
-        progress.report({ increment: progressBarcurrStep, message: `Running ${contractName}` });
+        const message = currentContract ? `Running ${currentContract}` : 'Running...';
+        progress.report({ increment: progressBarcurrStep, message });
     }, 1000);
 
-    let mythXresult: any;
-    try {
-        mythXresult = await client.analyzeWithStatus(analyzeOpts);
-
-        if (progressBarcurrStep < 100 ) {
-            progressBarcurrStep = 100;
-            progress.report({ increment: progressBarcurrStep, message: `Running ${contractName}` });
-        }
-        obj.setIssues(mythXresult.issues);
-        if (!config.style) {
-            config.style = 'stylish';
-        }
-        const spaceLimited: boolean = ['tap', 'markdown'].indexOf(config.style) === -1;
-        const eslintIssues = obj.getEslintIssues(spaceLimited);
-        const formatter = getFormatter(solidityConfig.mythx.reportFormat);
-        const groupedEslintIssues = groupEslintIssuesByBasename(eslintIssues);
-
-        const uniqueIssues = getUniqueIssues(groupedEslintIssues);
-        showMessage(formatter(uniqueIssues));
-
-        const reportsDir = trufstuf.getMythReportsDir(buildContractsDir);
-        const mdData = {
-            analysisMode: analyzeOpts.data.analysisMode,
-            compilerVersion: analyzeOpts.data.version,
-            contractName,
-            groupedEslintIssues,
-            reportsDir: reportsDir,
-            sourcePath: mythxBuilObj.sourceList[0], // FIXME: We currently analyze single file. It's ok to take first item
-            status: mythXresult.status,
-            timeout: solidityConfig.mythx.timeout,
-            // Add stuff like mythx version
+    const analysisResults = await Promise.all(contracts.map(async (contract: any) => {
+        const obj = new mythx.MythXIssues(contract, config);
+        const mythxBuildObj: any = obj.getBuildObj();
+        currentContract = obj.contractName;
+        const analyzeOpts = {
+            clientToolName: 'vscode-solidity',
+            data: mythxBuildObj,
+            timeout: solidityConfig.mythx.timeout * 1000,  // convert secs to millisecs
         };
-        await writeMarkdownReportAsync(mdData);
-    } catch (err) {
-        if (progressBarInterval) {
-            clearInterval(progressBarInterval);
-            progressBarInterval = null;
+        analyzeOpts.data.analysisMode = solidityConfig.mythx.analysisMode;
+        let mythXresult: any;
+        try {
+            mythXresult = await client.analyzeWithStatus(analyzeOpts);
+
+            if (progressBarcurrStep < 100) {
+                progressBarcurrStep = 100;
+                progress.report({ increment: progressBarcurrStep, message: `Running ${obj.contractName}` });
+            }
+            obj.setIssues(mythXresult.issues);
+            if (!config.style) {
+                config.style = 'stylish';
+            }
+            const spaceLimited: boolean = ['tap', 'markdown'].indexOf(config.style) === -1;
+            const eslintIssues = obj.getEslintIssues(spaceLimited);
+            const groupedEslintIssues = groupEslintIssuesByBasename(eslintIssues);
+
+            const uniqueIssues = getUniqueIssues(groupedEslintIssues);
+
+            const reportsDir = trufstuf.getMythReportsDir(pathInfo.buildMythxContractsDir);
+            const mdData = {
+                analysisMode: analyzeOpts.data.analysisMode,
+                compilerVersion: analyzeOpts.data.version,
+                contractName: obj.contractName,
+                groupedEslintIssues,
+                reportsDir: reportsDir,
+                sourcePath: mythxBuildObj.sourceList[0], // FIXME: We currently analyze a single file, so it's OK to take the first item
+                status: mythXresult.status,
+                timeout: solidityConfig.mythx.timeout,
+                // Add stuff like mythx version
+            };
+            await writeMarkdownReportAsync(mdData);
+            return uniqueIssues;
+        } catch (err) {
+            if (progressBarInterval) {
+                clearInterval(progressBarInterval);
+                progressBarInterval = null;
+            }
+            console.log(err);
+            showMessage(err);
+            vscode.window.showWarningMessage(err.message || String(err));
+            return null;
         }
-        console.log(err);
-        showMessage(err);
-        vscode.window.showWarningMessage(err);
-    }
-    return true;
+    }));
+    return analysisResults;
 }
 
 
@@ -362,7 +364,7 @@ export async function mythxAnalyze(progress) {
     let buildContractsDir: string = pathInfo.buildContractsDir;
     // FIXME: Add a better test to see if we are a truffle project
     try {
-        config = Config.detect(truffleOptions, pathInfo.rootDir);
+        config = Config.detect(truffleOptions);
         buildContractsDir = pathInfo.buildContractsDir;
     } catch (err) {
         // FIXME: Dummy up in config whatever we need to run compile.
@@ -392,6 +394,7 @@ export async function mythxAnalyze(progress) {
             },
             contracts_build_directory: buildContractsDir,
             contracts_directory: pathInfo.rootDir,
+            working_directory: pathInfo.rootDir,
         };
     }
 
@@ -407,12 +410,23 @@ export async function mythxAnalyze(progress) {
 
     // Set truffle compiler version based on vscode solidity's version info
     config.compilers.solc.version = vscode_solc.getVersion();
+    config.build_mythx_contracts = pathInfo.buildMythxContractsDir;
+
     await contractsCompile(config);
-    return await analyzeWithBuildDir({
-        buildContractsDir,
+    let analysisResults = await analyzeWithBuildDir({
+        buildContractsDir: pathInfo.buildMythxContractsDir,
         config,
         pathInfo,
         progress,
         solidityConfig,
     });
+
+    analysisResults = analysisResults.filter(res => res !== null);
+    analysisResults = analysisResults.reduce((accum, res) => accum.concat(res), []);
+
+    const groupedEslintIssues = groupEslintIssuesByBasename(analysisResults);
+    const uniqueIssues = getUniqueIssues(groupedEslintIssues);
+
+    const formatter = getFormatter(solidityConfig.mythx.reportFormat);
+    showMessage(formatter(uniqueIssues));
 }
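
The tail of mythxAnalyze is a plain filter-and-flatten over the per-contract results; a self-contained sketch of the same shape (the issue values are hypothetical placeholders):

    // Sketch: per-contract results, where a failed analysis yielded null.
    const perContract: any[] = [['issueA'], null, ['issueB', 'issueC']];
    const flat = perContract
        .filter(res => res !== null)
        .reduce((accum, res) => accum.concat(res), []);
    // flat is ['issueA', 'issueB', 'issueC'], ready for grouping and dedup.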

+ 78 - 13
src/analysis/mythx/mythx.ts

@@ -1,4 +1,4 @@
-import * as smd from './sourceMappingDecoder';
+import * as smd from './compat/remix-lib/sourceMappingDecoder';
 import * as srcmap from './srcmap';
 import * as path from 'path';
 
@@ -20,6 +20,11 @@ const isFatal = (fatal, severity) => fatal || severity === 2;
 
 
 export class MythXIssues {
+    public logs: any;
+    public logger: any;
+    public debug: any;
+    public sourcePath: any;
+
     private _issues: any;
     private _contractName: any;
     private _buildObj: any;
@@ -35,10 +40,14 @@ export class MythXIssues {
      *
      * @param {object} buildObj - Truffle smart contract build object
      */
-    constructor(buildObj: any) {
+    constructor(buildObj: any, config: any) {
         this._issues = [];
         this._contractName = buildObj.contractName;
         this._buildObj = truffle2MythXJSON(buildObj);
+        this.logs = [];
+        this.debug = config.debug;
+        this.logger = config.logger;
+        this.sourcePath = buildObj.sourcePath;
         this.contractSource = buildObj.source;
         this.sourceMap = this._buildObj.sourceMap;
         this.deployedSourceMap = this._buildObj.deployedSourceMap;
@@ -97,10 +106,27 @@ export class MythXIssues {
      *
      * @param {object[]} issues - MythX analyze API output result issues
      */
-    public setIssues(issues) {
-        this._issues = issues
-            .map(remapMythXOutput)
+    public setIssues(issueGroups) {
+        issueGroups.forEach((issueGroup: any) => {
+            if (issueGroup.sourceType === 'solidity-file' &&
+                issueGroup.sourceFormat === 'text') {
+                // Keep an issue if at least one of its locations is not ignorable.
+                // (Using some() also avoids pushing the same issue once per location.)
+                issueGroup.issues = issueGroup.issues.filter((issue: any) =>
+                    issue.locations.some((location: any) => !this.isIgnorable(location.sourceMap)));
+            }
+        });
+        const remappedIssues = issueGroups.map(remapMythXOutput);
+        this._issues = remappedIssues
             .reduce((acc, curr) => acc.concat(curr), []);
+
+        const logs = issueGroups.map((issueGroup: any) => (issueGroup.meta && issueGroup.meta.logs) || []);
+        this.logs = logs.reduce((acc, curr) => acc.concat(curr), []);
     }
 
     public getBuildObj() {
@@ -145,14 +171,17 @@ export class MythXIssues {
     }
 
     // Is this an issue that should be ignored?
-    public isIgnorable(sourceMapLocation, options, source) {
-        const ast = this.asts[source];
-        const instIndex = sourceMapLocation.split(':')[0];
-        const node = srcmap.isVariableDeclaration(instIndex, this.deployedSourceMap, ast);
+    public isIgnorable(sourceMapLocation) {
+        const basename = path.basename(this.sourcePath);
+        if (!(basename in this.asts)) {
+            return false;
+        }
+        const ast = this.asts[basename];
+        const node = srcmap.isVariableDeclaration(sourceMapLocation, ast);
         if (node && srcmap.isDynamicArray(node)) {
-            if (options.debug) {
+            if (this.debug) {
                // this might break if logger is none.
-                const logger = options.logger || console;
+                const logger = this.logger || console;
                 logger.log('**debug: Ignoring Mythril issue around ' +
                       'dynamically-allocated array.');
             }
@@ -369,14 +398,21 @@ export const truffle2MythXJSON = function(truffleJSON: any): any {
         contractName,
         bytecode,
         deployedBytecode,
-        sourceMap,
-        deployedSourceMap,
         sourcePath,
         source,
         ast,
+        legacyAST,
         compiler: { version },
     } = truffleJSON;
 
+    let { sourceMap, deployedSourceMap } = truffleJSON;
+    // FIXME: why do we have only one sourcePath in sourceList?
+    // We shouldn't be zeroing this, but instead correcting sourceList
+    // to have the multiple entries.
+    sourceMap = srcmap.zeroedSourceMap(sourceMap);
+    deployedSourceMap = srcmap.zeroedSourceMap(deployedSourceMap);
+
     const sourcesKey = path.basename(sourcePath);
 
     return {
@@ -389,6 +425,7 @@ export const truffle2MythXJSON = function(truffleJSON: any): any {
         sources: {
             [sourcesKey]: {
                 ast,
+                legacyAST,
                 source,
             },
         },
@@ -423,3 +460,31 @@ export const remapMythXOutput = mythObject => {
 
     return mapped;
 };
+
+export const newTruffleObjToOldTruffleByContracts = (buildObj: any) => {
+    const { sources, compiler } = buildObj;
+
+    let allContracts = [];
+
+    for (const e of Object.entries(sources)) {
+        const sourcePath: string = e[0];
+        const data: any = e[1];
+
+        const contracts = data.contracts.map(contract => ({
+            contractName: contract.contractName,
+            bytecode: contract.bytecode,
+            deployedBytecode: contract.deployedBytecode,
+            sourceMap: contract.sourceMap,
+            deployedSourceMap: contract.deployedSourceMap,
+            ast: data.ast,
+            legacyAST: data.legacyAST,
+            source: data.source,
+            compiler,
+            sourcePath,
+        }));
+
+        allContracts = allContracts.concat(contracts);
+    }
+
+    return allContracts;
+};
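
To make the shape conversion concrete, here is a hypothetical new-style build object and what the helper produces from it (all field values abbreviated):

    // Hypothetical input: one source file containing one contract.
    const buildObj = {
        compiler: { name: 'solc', version: '0.5.6' },
        sources: {
            'Token.sol': {
                ast: {}, legacyAST: {}, source: 'contract Token {}',
                contracts: [{ contractName: 'Token', bytecode: '0x..',
                              deployedBytecode: '0x..', sourceMap: '..',
                              deployedSourceMap: '..' }],
            },
        },
    };
    // newTruffleObjToOldTruffleByContracts(buildObj) returns one flat,
    // old-style entry per contract, each copying its file's ast, legacyAST,
    // source, compiler, and sourcePath ('Token.sol').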

+ 37 - 9
src/analysis/mythx/srcmap.ts

@@ -2,24 +2,23 @@
 // We use this to filter out some MythX error messages.
 //
 
-import { SourceMappingDecoder } from './sourceMappingDecoder';
+import { SourceMappingDecoder } from './compat/remix-lib/sourceMappingDecoder';
 import {GetOpcode} from './opcodes';
 
 /**
  *  Return the VariableDeclaration AST node associated with instIndex
  *  if there is one. Otherwise return null.
- *  @param {instIndex} number  - bytecode offset of instruction
- *  @param {sourceMap} string  - solc srcmap used to associate the instruction
- *                               with an ast node
- *  @param {ast}               - solc root AST for contract
+ *  @param {string} srcmap - solc srcmap entry used to associate the
+ *                           instruction with an AST node
+ *  @param {object} ast    - solc root AST for the contract
  *  @return {AST node or null}
  *
  */
-export function isVariableDeclaration (instIndex: number, sourceMap: string,
-                                       ast: any) {
+export function isVariableDeclaration (srcmap: string, ast: any) {
     const sourceMappingDecoder = new SourceMappingDecoder();
-    return sourceMappingDecoder.findNodeAtInstructionIndex('VariableDeclaration',
-                                                           instIndex, sourceMap, ast);
+    const sourceLocation = sourceMappingDecoder.decode(srcmap);
+    return sourceMappingDecoder.findNodeAtSourceLocation('VariableDeclaration',
+        sourceLocation, ast);
 }
 
 /* from remix-lib/src/util */
@@ -99,3 +98,32 @@ export function makeOffset2InstNum(hexstr: string): Array<number> {
     }
     return instMap;
 }
+
+// FIXME: this is just a stopgap measure.
+// The caller in mythx should be fixed so we don't need this.
+/**
+ *  @param {string} sourceMap - solc-style source map
+ *  @return {string} the source map with every file index rewritten to 0
+*/
+export function zeroedSourceMap (sourceMap: string) {
+    const srcArray = sourceMap.split(';');
+    const modArray = [];
+    let indexSeen: any = null;  // first real file index encountered (other than -1/empty)
+    for (const src of srcArray) {
+        const fields = src.split(':');
+        if (fields.length >= 3) {
+            const index = fields[2];
+            if (index !== '-1' && index !== '') {
+                if (indexSeen !== null && indexSeen !== index) {
+                    throw new Error(`assuming only one index ${indexSeen} needs moving; saw ${index} as well`);
+                }
+                indexSeen = index;
+                fields[2] = '0';
+            }
+        }
+        const modFields = fields.join(':');
+        modArray.push(modFields);
+    }
+    return modArray.join(';');
+}
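
A worked example of zeroedSourceMap on a solc source map (entries are s:l:f[:j]; only the file index f is rewritten, while -1 and empty entries are left alone):

    zeroedSourceMap('0:63:1:-;27:34:1;;;-1:-1:-1');
    // => '0:63:0:-;27:34:0;;;-1:-1:-1'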

+ 9 - 29
src/analysis/mythx/trufstuf.ts

@@ -10,31 +10,12 @@ const readdir = util.promisify(fs.readdir);
 const readFile = util.promisify(fs.readFile);
 const stat = util.promisify(fs.stat);
 
-const parseBuildJson = async file => {
+export const parseBuildJson = async file => {
     const buildJson = await readFile(file, 'utf8');
     const buildObj = JSON.parse(buildJson);
     return buildObj;
 };
 
-/* returns true if directory/file out of date
-*/
-const staleBuildContract = async (directory, file) => {
-    const fullPath = path.join(directory, file);
-    const buildObj = await parseBuildJson(fullPath);
-    const fullPathStat = await stat(fullPath);
-    const buildMtime = fullPathStat.mtime;
-    const sourcePath = buildObj.sourcePath;
-    let sourcePathStat;
-
-    try {
-        sourcePathStat = await stat(sourcePath);
-    } catch (err) {
-        return true;
-    }
-
-    const sourceMtime = sourcePathStat.mtime;
-    return sourceMtime > buildMtime;
-};
 
 // Directories that must be in a truffle project
 
@@ -107,13 +88,8 @@ export const getRootDirAsync = async (p: string): Promise<string> => {
  */
 export const getTruffleBuildJsonFilesAsync = async function(directory: string) {
     const files = await readdir(directory);
-    const filtered1 = files.filter(f => f !== 'Migrations.json');
-    const promisified = await Promise.all(filtered1.map(async f => {
-        const isStale = await staleBuildContract(directory, f);
-        return isStale ? null : f;
-    }));
-    const filtered2 = promisified.filter(f => !!f);
-    const filePaths = filtered2.map(f => path.join(directory, f));
+    const filtered = files.filter(f => f !== 'Migrations.json');
+    const filePaths = filtered.map(f => path.join(directory, f));
     return filePaths;
 };
 
@@ -121,10 +97,14 @@ export function getBuildContractsDir(p: string): string {
     return `${p}/build/contracts`;
 }
 
+export function getBuildMythxContractsDir(p: string): string {
+    return `${p}/build/mythx/contracts`;
+}
+
 export function getContractsDir(p: string) {
     return `${p}/contracts`;
 }
 
-export function getMythReportsDir(buildContractsDir: string) {
-    return path.normalize(path.join(buildContractsDir, '..', 'mythx'));
+export function getMythReportsDir(buildMythXContractsDir: string) {
+    return path.normalize(path.join(buildMythXContractsDir, '..', 'reports'));
 }
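
The two directory helpers compose as follows; a quick worked example with an assumed project root:

    getBuildMythxContractsDir('/work/proj');
    // => '/work/proj/build/mythx/contracts'
    getMythReportsDir('/work/proj/build/mythx/contracts');
    // path.normalize('.../contracts/../reports') => '/work/proj/build/mythx/reports'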

+ 232 - 0
src/analysis/mythx/wfc.ts

@@ -0,0 +1,232 @@
+/*
+   This is largely a cut and paste of truffle-workflow-compile
+
+   We have modified it though to save additional information per contract
+   to assist MythX analysis.
+
+   In particular we add:
+     sourceList[]  - a list of the sources that can be used in a sourceMap.
+     sources       - a dict whose key is an entry of sourceList and whose value contains
+         source: string
+         ast: ast
+         legacyAST: ast
+*/
+import * as mkdirp from 'mkdirp';
+import * as path from 'path';
+import * as fs from 'fs-extra';
+import { callbackify, promisify } from 'util';
+import * as Config from 'truffle-config';
+import solcCompile from './compat/truffle-compile';
+import * as externalCompile from 'truffle-external-compile';
+import * as expect from 'truffle-expect';
+import * as Resolver from 'truffle-resolver';
+import * as Artifactor from 'truffle-artifactor';
+import * as OS from 'os';
+
+
+const SUPPORTED_COMPILERS = {
+    'external': externalCompile,
+    'solc': solcCompile,
+};
+
+/* A replacement for truffle-artifactor's save(), that
+   puts in only the MythX-needed fields.
+*/
+const mythXsave = function(object) {
+    const self = this;
+
+    return new Promise(function(accept, reject) {
+
+        if (object.contractName == null) {
+            return reject(new Error('You must specify a contract name.'));
+        }
+
+        delete object.contract_name;
+
+        let outputPath = object.contractName;
+
+        // Create new path off of destination.
+        outputPath = path.join(self.destination, outputPath);
+        outputPath = path.resolve(outputPath);
+
+        // Add json extension.
+        outputPath = outputPath + '.json';
+
+        fs.readFile(outputPath, {encoding: 'utf8'}, function(err, json) {
+            // No need to handle the error. If the file doesn't exist then we'll start afresh
+            // with a new object.
+
+            const finalObject = object;
+
+            if (!err) {
+                try {
+                    JSON.parse(json);  // only validating the existing file here
+                } catch (e) {
+                    return reject(e);
+                }
+
+                /*
+                // normalize existing and merge into final
+                finalObject = Schema.normalize(existingObjDirty);
+
+                // merge networks
+                var finalNetworks = {};
+                _.merge(finalNetworks, finalObject.networks, object.networks);
+
+                // update existing with new
+                _.assign(finalObject, object);
+                finalObject.networks = finalNetworks;
+                */
+            }
+
+            // update timestamp
+            finalObject.updatedAt = new Date().toISOString();
+
+            // output object
+            fs.outputFile(outputPath, JSON.stringify(finalObject, null, 2), 'utf8', function(outErr: any) {
+                if (outErr) {
+                    return reject(outErr);
+                }
+                accept();
+            });
+        });
+    });
+};
+
+/* FIXME: if truffle-workflow-compile added a parameter, a directory name
+   under "build", we wouldn't have to change this.
+*/
+function prepareConfig(options) {
+    expect.options(options, [
+        'build_mythx_contracts',
+    ]);
+
+    // Use a config object to ensure we get the default sources.
+    const config = Config.default().merge(options);
+
+    if (!config.resolver) {
+        config.resolver = new Resolver(config);
+    }
+
+    if (!config.artifactor) {
+        config.artifactor = new Artifactor(config.build_mythx_contracts);
+        config.artifactor.save = mythXsave;
+    }
+
+    return config;
+}
+
+/*
+  This function is not modified from truffle-workflow-compile.
+*/
+function multiPromisify (func) {
+    // FIXME: accumulating this to a list is weird.
+    const resultList = [];
+    return (...args) => new Promise( (accept, reject) => {
+        const callback = (err, ...results) => {
+            if (err) {
+                reject(err);
+                return ;
+            }
+            resultList.push(results);
+            accept(resultList);
+        };
+
+        func(...args, callback);
+    });
+}
+
+const Contracts = {
+
+    // contracts_directory: String. Directory where .sol files can be found.
+    // build_mythx_contracts: String. Directory where contract build artifacts can be found and written to.
+    // all: Boolean. Compile all sources found. Defaults to true. If false, will compare sources against built files
+    //      in the build directory to see what needs to be compiled.
+    // quiet: Boolean. Suppress output. Defaults to false.
+    // strict: Boolean. Return compiler warnings as errors. Defaults to false.
+    compile: callbackify(async function(options) {
+        const config = prepareConfig(options);
+
+        // FIXME: Simplify by removing vyper right now.
+        delete config.compilers.vyper;
+
+        const compilers = (config.compiler)
+            ? [config.compiler]
+            : Object.keys(config.compilers);
+
+        // compile with each configured compiler and write out stale artifacts
+        const compilations = await this.compileSources(config, compilers);
+
+        const collect = async (comps: any) => {
+            const result = {
+                basenames: {},
+                outputs: {},
+            };
+
+            for (const compilation of comps) {
+                const { compiler, artifacts } = compilation;
+
+                if (artifacts) {
+                    result.outputs[compiler] = artifacts;
+
+                    for (const artifact of artifacts) {
+                        for (const [ basename, abstraction ] of Object.entries(artifact)) {
+                            result.basenames[basename] = abstraction;
+                        }
+
+                    }
+                }
+            }
+
+            return result;
+        };
+
+        return await collect(compilations);
+    }),
+
+    compileSources: async function(config, compilers) {
+        return Promise.all(
+            compilers.map(async (compiler) => {
+                const compile = SUPPORTED_COMPILERS[compiler];
+                if (!compile) {
+                    throw new Error('Unsupported compiler: ' + compiler);
+                }
+
+                const compileFunc = (config.all === true || config.compileAll === true)
+                    ? compile.all
+                    : compile.necessary;
+
+                const results: any = await multiPromisify(compileFunc)(config);
+                if (results && results.length > 0) {
+                    let seenStale = false;
+                    for (const result of results) {
+                        const [artifact, stale] = result;
+                        if (stale) {
+                            if (config.quiet !== true && config.quietWrite !== true && !seenStale) {
+                                const relPath = path.relative(config.working_directory, config.build_mythx_contracts);
+                                config.logger.log(`Writing artifacts to .${path.sep}${relPath}${OS.EOL}`);
+                                seenStale = true;
+                            }
+                            await this.writeContracts(artifact, config);
+                        }
+                    }
+                }
+                return { compiler, results };
+            }),
+        );
+    },
+
+    writeContracts: async function(artifact, options) {
+        await promisify(mkdirp)(options.build_mythx_contracts);
+
+        const shortNames = Object.keys(artifact);
+        await Promise.all(shortNames.map(async (shortName) => {
+            const jsonData = JSON.stringify(artifact[shortName], null, 4);
+            const jsonPath = path.join(options.build_mythx_contracts, shortName + '.json');
+            return await promisify(fs.writeFile)(jsonPath, jsonData);
+        }));
+    },
+};
+
+export default Contracts;
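
A hedged usage sketch of the exported Contracts object, mirroring how index.ts drives it through the promisified contractsCompile wrapper (the config fields shown are the ones prepareConfig expects; the paths are hypothetical):

    import Contracts from './wfc';

    // compile() is callbackified, so it is consumed callback-style.
    Contracts.compile({
        build_mythx_contracts: '/work/proj/build/mythx/contracts',  // required by prepareConfig
        contracts_directory: '/work/proj/contracts',
        working_directory: '/work/proj',
    }, (err: any, result: any) => {
        if (err) { return console.error(err); }
        // result.outputs: compiler name -> artifacts
        // result.basenames: contract basename -> abstraction
        console.log(Object.keys(result.basenames));
    });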