Merge pull request #460 from 0xProject/feature/deployer-improvements
@0xproject/deployer improvements
@@ -7,7 +7,8 @@
    "scripts": {
        "build:watch": "tsc -w",
        "build": "yarn clean && copyfiles 'test/fixtures/contracts/**/*' ./lib && tsc && copyfiles -u 3 './lib/src/monorepo_scripts/**/*' ./scripts",
        "test": "npm run build; mocha lib/test/*_test.js",
        "test": "run-s build run_mocha",
        "run_mocha": "mocha lib/test/*_test.js",
        "test:coverage": "nyc npm run test --all && yarn coverage:report:lcov",
        "coverage:report:lcov": "nyc report --reporter=text-lcov > coverage/lcov.info",
        "compile": "npm run build; node lib/src/cli.js compile",
@@ -46,9 +47,12 @@
        "@0xproject/monorepo-scripts": "^0.1.14",
        "@0xproject/tslint-config": "^0.4.12",
        "@types/require-from-string": "^1.2.0",
        "@types/semver": "^5.5.0",
        "@types/yargs": "^11.0.0",
        "npm-run-all": "^4.1.2",
        "chai": "^4.0.1",
        "copyfiles": "^1.2.0",
        "dirty-chai": "^2.0.1",
        "ethers-typescript-typings": "^0.0.4",
        "mocha": "^4.0.1",
        "nyc": "^11.0.1",
@@ -68,6 +72,7 @@
        "isomorphic-fetch": "^2.2.1",
        "lodash": "^4.17.4",
        "require-from-string": "^2.0.1",
        "semver": "^5.5.0",
        "solc": "^0.4.18",
        "web3": "^0.20.0",
        "web3-eth-abi": "^1.0.0-beta.24",
@@ -10,6 +10,7 @@ import * as yargs from 'yargs';

import { commands } from './commands';
import { constants } from './utils/constants';
import { consoleReporter } from './utils/error_reporter';
import { CliOptions, CompilerOptions, DeployerOptions } from './utils/types';

const DEFAULT_OPTIMIZER_ENABLED = false;
@@ -24,11 +25,11 @@ const DEFAULT_CONTRACTS_LIST = '*';
 * Compiles all contracts with options passed in through CLI.
 * @param argv Instance of process.argv provided by yargs.
 */
async function onCompileCommand(argv: CliOptions): Promise<void> {
async function onCompileCommandAsync(argv: CliOptions): Promise<void> {
    const opts: CompilerOptions = {
        contractsDir: argv.contractsDir,
        networkId: argv.networkId,
        optimizerEnabled: argv.shouldOptimize ? 1 : 0,
        optimizerEnabled: argv.shouldOptimize,
        artifactsDir: argv.artifactsDir,
        specifiedContracts: getContractsSetFromList(argv.contracts),
    };
@@ -38,7 +39,7 @@ async function onCompileCommand(argv: CliOptions): Promise<void> {
 * Deploys a single contract with provided name and args.
 * @param argv Instance of process.argv provided by yargs.
 */
async function onDeployCommand(argv: CliOptions): Promise<void> {
async function onDeployCommandAsync(argv: CliOptions): Promise<void> {
    const url = argv.jsonrpcUrl;
    const web3Provider = new Web3.providers.HttpProvider(url);
    const web3Wrapper = new Web3Wrapper(web3Provider);
@@ -46,7 +47,7 @@ async function onDeployCommand(argv: CliOptions): Promise<void> {
    const compilerOpts: CompilerOptions = {
        contractsDir: argv.contractsDir,
        networkId,
        optimizerEnabled: argv.shouldOptimize ? 1 : 0,
        optimizerEnabled: argv.shouldOptimize,
        artifactsDir: argv.artifactsDir,
        specifiedContracts: getContractsSetFromList(argv.contracts),
    };
@@ -62,9 +63,9 @@ async function onDeployCommand(argv: CliOptions): Promise<void> {
        networkId,
        defaults,
    };
    const deployerArgsString = argv.args;
    const deployerArgsString = argv.args as string;
    const deployerArgs = deployerArgsString.split(',');
    await commands.deployAsync(argv.contract, deployerArgs, deployerOpts);
    await commands.deployAsync(argv.contract as string, deployerArgs, deployerOpts);
}
/**
 * Creates a set of contracts to compile.
@@ -142,7 +143,12 @@ function deployCommandBuilder(yargsInstance: any) {
            default: DEFAULT_CONTRACTS_LIST,
            description: 'comma separated list of contracts to compile',
        })
        .command('compile', 'compile contracts', identityCommandBuilder, onCompileCommand)
        .command('deploy', 'deploy a single contract with provided arguments', deployCommandBuilder, onDeployCommand)
        .command('compile', 'compile contracts', identityCommandBuilder, consoleReporter(onCompileCommandAsync))
        .command(
            'deploy',
            'deploy a single contract with provided arguments',
            deployCommandBuilder,
            consoleReporter(onDeployCommandAsync),
        )
        .help().argv;
})();
@@ -5,7 +5,7 @@ import { CompilerOptions, DeployerOptions } from './utils/types';
export const commands = {
    async compileAsync(opts: CompilerOptions): Promise<void> {
        const compiler = new Compiler(opts);
        await compiler.compileAllAsync();
        await compiler.compileAsync();
    },
    async deployAsync(contractName: string, args: any[], opts: DeployerOptions): Promise<void> {
        const deployer = new Deployer(opts);
@@ -5,10 +5,19 @@ import 'isomorphic-fetch';
import * as _ from 'lodash';
import * as path from 'path';
import * as requireFromString from 'require-from-string';
import * as semver from 'semver';
import solc = require('solc');
import * as Web3 from 'web3';

import { binPaths } from './solc/bin_paths';
import {
    createArtifactsDirIfDoesNotExistAsync,
    findImportIfExist,
    getContractArtifactIfExistsAsync,
    getNormalizedErrMsg,
    parseDependencies,
    parseSolidityVersionRange,
} from './utils/compiler';
import { constants } from './utils/constants';
import { fsWrapper } from './utils/fs_wrapper';
import {
@@ -23,10 +32,6 @@ import {
import { utils } from './utils/utils';

const ALL_CONTRACTS_IDENTIFIER = '*';
const SOLIDITY_VERSION_REGEX = /(?:solidity\s\^?)(\d+\.\d+\.\d+)/;
const SOLIDITY_FILE_EXTENSION_REGEX = /(.*\.sol)/;
const IMPORT_REGEX = /(import\s)/;
const DEPENDENCY_PATH_REGEX = /"([^"]+)"/; // Source: https://github.com/BlockChainCompany/soljitsu/blob/master/lib/shared.js

/**
 * The Compiler facilitates compiling Solidity smart contracts and saves the results
@@ -35,9 +40,10 @@ const DEPENDENCY_PATH_REGEX = /"([^"]+)"/; // Source: https://github.com/BlockCh
export class Compiler {
    private _contractsDir: string;
    private _networkId: number;
    private _optimizerEnabled: number;
    private _optimizerEnabled: boolean;
    private _artifactsDir: string;
    private _contractSources?: ContractSources;
    // This gets set at the beginning of `compileAsync`. It is not set in the constructor, but `compileAsync` is the only public method of this class, so it might as well be.
    private _contractSources!: ContractSources;
    private _solcErrors: Set<string> = new Set();
    private _specifiedContracts: Set<string> = new Set();
    private _contractSourceData: ContractSourceData = {};
@@ -81,64 +87,6 @@ export class Compiler {
        }
        return sources;
    }
    /**
     * Gets contract dependencies and keccak256 hash from source.
     * @param source Source code of contract.
     * @return Object with contract dependencies and keccak256 hash of source.
     */
    private static _getContractSpecificSourceData(source: string): ContractSpecificSourceData {
        const dependencies: string[] = [];
        const sourceHash = ethUtil.sha3(source);
        const solcVersion = Compiler._parseSolidityVersion(source);
        const contractSpecificSourceData: ContractSpecificSourceData = {
            dependencies,
            solcVersion,
            sourceHash,
        };
        const lines = source.split('\n');
        _.forEach(lines, line => {
            if (!_.isNull(line.match(IMPORT_REGEX))) {
                const dependencyMatch = line.match(DEPENDENCY_PATH_REGEX);
                if (!_.isNull(dependencyMatch)) {
                    const dependencyPath = dependencyMatch[1];
                    const fileName = path.basename(dependencyPath);
                    contractSpecificSourceData.dependencies.push(fileName);
                }
            }
        });
        return contractSpecificSourceData;
    }
    /**
     * Searches Solidity source code for compiler version.
     * @param source Source code of contract.
     * @return Solc compiler version.
     */
    private static _parseSolidityVersion(source: string): string {
        const solcVersionMatch = source.match(SOLIDITY_VERSION_REGEX);
        if (_.isNull(solcVersionMatch)) {
            throw new Error('Could not find Solidity version in source');
        }
        const solcVersion = solcVersionMatch[1];
        return solcVersion;
    }
    /**
     * Normalizes the path found in the error message.
     * Example: converts 'base/Token.sol:6:46: Warning: Unused local variable'
     * to 'Token.sol:6:46: Warning: Unused local variable'
     * This is used to prevent logging the same error multiple times.
     * @param errMsg An error message from the compiled output.
     * @return The error message with directories truncated from the contract path.
     */
    private static _getNormalizedErrMsg(errMsg: string): string {
        const errPathMatch = errMsg.match(SOLIDITY_FILE_EXTENSION_REGEX);
        if (_.isNull(errPathMatch)) {
            throw new Error('Could not find a path in error message');
        }
        const errPath = errPathMatch[0];
        const baseContract = path.basename(errPath);
        const normalizedErrMsg = errMsg.replace(errPath, baseContract);
        return normalizedErrMsg;
    }
    /**
     * Instantiates a new instance of the Compiler class.
     * @param opts Options specifying directories, network, and optimization settings.
@@ -152,21 +100,18 @@ export class Compiler {
        this._specifiedContracts = opts.specifiedContracts;
    }
    /**
     * Compiles all Solidity files found in `contractsDir` and writes JSON artifacts to `artifactsDir`.
     * Compiles selected Solidity files found in `contractsDir` and writes JSON artifacts to `artifactsDir`.
     */
    public async compileAllAsync(): Promise<void> {
        await this._createArtifactsDirIfDoesNotExistAsync();
    public async compileAsync(): Promise<void> {
        await createArtifactsDirIfDoesNotExistAsync(this._artifactsDir);
        this._contractSources = await Compiler._getContractSourcesAsync(this._contractsDir);
        _.forIn(this._contractSources, (source, fileName) => {
            this._contractSourceData[fileName] = Compiler._getContractSpecificSourceData(source);
        });
        _.forIn(this._contractSources, this._setContractSpecificSourceData.bind(this));
        const fileNames = this._specifiedContracts.has(ALL_CONTRACTS_IDENTIFIER)
            ? _.keys(this._contractSources)
            : Array.from(this._specifiedContracts.values());
        _.forEach(fileNames, fileName => {
            this._setSourceTreeHash(fileName);
        });
        await Promise.all(_.map(fileNames, async fileName => this._compileContractAsync(fileName)));
        for (const fileName of fileNames) {
            await this._compileContractAsync(fileName);
        }
        this._solcErrors.forEach(errMsg => {
            logUtils.log(errMsg);
        });
@@ -180,19 +125,28 @@ export class Compiler {
            throw new Error('Contract sources not yet initialized');
        }
        const contractSpecificSourceData = this._contractSourceData[fileName];
        const currentArtifactIfExists = (await this._getContractArtifactIfExistsAsync(fileName)) as ContractArtifact;
        const currentArtifactIfExists = await getContractArtifactIfExistsAsync(this._artifactsDir, fileName);
        const sourceHash = `0x${contractSpecificSourceData.sourceHash.toString('hex')}`;
        const sourceTreeHash = `0x${contractSpecificSourceData.sourceTreeHashIfExists.toString('hex')}`;
        const sourceTreeHash = `0x${contractSpecificSourceData.sourceTreeHash.toString('hex')}`;

        const shouldCompile =
            _.isUndefined(currentArtifactIfExists) ||
            currentArtifactIfExists.networks[this._networkId].optimizer_enabled !== this._optimizerEnabled ||
            currentArtifactIfExists.networks[this._networkId].source_tree_hash !== sourceTreeHash;
        let shouldCompile = false;
        if (_.isUndefined(currentArtifactIfExists)) {
            shouldCompile = true;
        } else {
            const currentArtifact = currentArtifactIfExists as ContractArtifact;
            shouldCompile =
                currentArtifact.networks[this._networkId].optimizer_enabled !== this._optimizerEnabled ||
                currentArtifact.networks[this._networkId].source_tree_hash !== sourceTreeHash;
        }
        if (!shouldCompile) {
            return;
        }

        const fullSolcVersion = binPaths[contractSpecificSourceData.solcVersion];
        const availableCompilerVersions = _.keys(binPaths);
        const solcVersion = semver.maxSatisfying(
            availableCompilerVersions,
            contractSpecificSourceData.solcVersionRange,
        );
        const fullSolcVersion = binPaths[solcVersion];
        const compilerBinFilename = path.join(__dirname, '../../solc_bin', fullSolcVersion);
        let solcjs: string;
        const isCompilerAvailableLocally = fs.existsSync(compilerBinFilename);
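Note: instead of pinning the exact version named in the pragma, the compiler now parses the full semver range and picks the newest vendored solc binary that satisfies it. A rough sketch of that selection (not part of the diff; the binPaths entries below are hypothetical placeholders):

```typescript
// Illustrative only: match a pragma-derived range against the available compiler versions.
import * as semver from 'semver';

const binPaths: { [version: string]: string } = {
    '0.4.18': 'soljson-v0.4.18+commit.example.js', // hypothetical entries
    '0.4.19': 'soljson-v0.4.19+commit.example.js',
};
const solcVersionRange = '^0.4.15'; // e.g. parsed from `pragma solidity ^0.4.15;`
const solcVersion = semver.maxSatisfying(Object.keys(binPaths), solcVersionRange); // '0.4.19'
const fullSolcVersion = binPaths[solcVersion as string];
```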
@@ -210,7 +164,7 @@ export class Compiler {
        }
        const solcInstance = solc.setupMethods(requireFromString(solcjs, compilerBinFilename));

        logUtils.log(`Compiling ${fileName}...`);
        logUtils.log(`Compiling ${fileName} with Solidity v${solcVersion}...`);
        const source = this._contractSources[fileName];
        const input = {
            [fileName]: source,
@@ -218,20 +172,23 @@ export class Compiler {
        const sourcesToCompile = {
            sources: input,
        };
        const compiled = solcInstance.compile(
            sourcesToCompile,
            this._optimizerEnabled,
            this._findImportsIfSourcesExist.bind(this),
        const compiled = solcInstance.compile(sourcesToCompile, Number(this._optimizerEnabled), importPath =>
            findImportIfExist(this._contractSources, importPath),
        );

        if (!_.isUndefined(compiled.errors)) {
            _.forEach(compiled.errors, errMsg => {
                const normalizedErrMsg = Compiler._getNormalizedErrMsg(errMsg);
                const normalizedErrMsg = getNormalizedErrMsg(errMsg);
                this._solcErrors.add(normalizedErrMsg);
            });
        }
        const contractName = path.basename(fileName, constants.SOLIDITY_FILE_EXTENSION);
        const contractIdentifier = `${fileName}:${contractName}`;
        if (_.isUndefined(compiled.contracts[contractIdentifier])) {
            throw new Error(
                `Contract ${contractName} not found in ${fileName}. Please make sure your contract has the same name as its file name`,
            );
        }
        const abi: Web3.ContractAbi = JSON.parse(compiled.contracts[contractIdentifier].interface);
        const bytecode = `0x${compiled.contracts[contractIdentifier].bytecode}`;
        const runtimeBytecode = `0x${compiled.contracts[contractIdentifier].runtimeBytecode}`;
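Note: the optimizer setting is now carried through the package as a boolean, but the legacy solc-js compile(sources, optimize, findImports) call used above still takes a numeric flag, which is why it is coerced at the call site. A trivial sketch of the coercion (assuming the solc ^0.4.x API as used in this file):

```typescript
// Sketch only: solc ^0.4.x's compile() expects 1/0 for its optimize argument.
const shouldOptimize = true;
const optimizeFlag = Number(shouldOptimize); // 1 when enabled, 0 when disabled
```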
@@ -240,7 +197,7 @@ export class Compiler {
        const sources = _.keys(compiled.sources);
        const updated_at = Date.now();
        const contractNetworkData: ContractNetworkData = {
            solc_version: contractSpecificSourceData.solcVersion,
            solc_version: solcVersion,
            keccak256: sourceHash,
            source_tree_hash: sourceTreeHash,
            optimizer_enabled: this._optimizerEnabled,
@@ -255,10 +212,11 @@ export class Compiler {

        let newArtifact: ContractArtifact;
        if (!_.isUndefined(currentArtifactIfExists)) {
            const currentArtifact = currentArtifactIfExists as ContractArtifact;
            newArtifact = {
                ...currentArtifactIfExists,
                ...currentArtifact,
                networks: {
                    ...currentArtifactIfExists.networks,
                    ...currentArtifact.networks,
                    [this._networkId]: contractNetworkData,
                },
            };
@@ -277,79 +235,42 @@ export class Compiler {
        logUtils.log(`${fileName} artifact saved!`);
    }
    /**
     * Sets the source tree hash for a file and its dependencies.
     * @param fileName Name of contract file.
     * Gets contract dependencies and keccak256 hash from source.
     * @param source Source code of contract.
     * @return Object with contract dependencies and keccak256 hash of source.
     */
    private _setSourceTreeHash(fileName: string): void {
        const contractSpecificSourceData = this._contractSourceData[fileName];
        if (_.isUndefined(contractSpecificSourceData)) {
            throw new Error(`Contract data for ${fileName} not yet set`);
    private _setContractSpecificSourceData(source: string, fileName: string): void {
        if (!_.isUndefined(this._contractSourceData[fileName])) {
            return;
        }
        if (_.isUndefined(contractSpecificSourceData.sourceTreeHashIfExists)) {
            const dependencies = contractSpecificSourceData.dependencies;
            if (dependencies.length === 0) {
                contractSpecificSourceData.sourceTreeHashIfExists = contractSpecificSourceData.sourceHash;
            } else {
                _.forEach(dependencies, dependency => {
                    this._setSourceTreeHash(dependency);
                });
                const dependencySourceTreeHashes = _.map(
                    dependencies,
                    dependency => this._contractSourceData[dependency].sourceTreeHashIfExists,
                );
                const sourceTreeHashesBuffer = Buffer.concat([
                    contractSpecificSourceData.sourceHash,
                    ...dependencySourceTreeHashes,
                ]);
                contractSpecificSourceData.sourceTreeHashIfExists = ethUtil.sha3(sourceTreeHashesBuffer);
            }
        }
    }
    /**
     * Callback to resolve dependencies with `solc.compile`.
     * Throws error if contractSources not yet initialized.
     * @param importPath Path to an imported dependency.
     * @return Import contents object containing source code of dependency.
     */
    private _findImportsIfSourcesExist(importPath: string): solc.ImportContents {
        const fileName = path.basename(importPath);
        const source = this._contractSources[fileName];
        if (_.isUndefined(source)) {
            throw new Error(`Contract source not found for ${fileName}`);
        }
        const importContents: solc.ImportContents = {
            contents: source,
        const sourceHash = ethUtil.sha3(source);
        const solcVersionRange = parseSolidityVersionRange(source);
        const dependencies = parseDependencies(source);
        const sourceTreeHash = this._getSourceTreeHash(fileName, sourceHash, dependencies);
        this._contractSourceData[fileName] = {
            dependencies,
            solcVersionRange,
            sourceHash,
            sourceTreeHash,
        };
        return importContents;
    }
    /**
     * Creates the artifacts directory if it does not already exist.
     */
    private async _createArtifactsDirIfDoesNotExistAsync(): Promise<void> {
        if (!fsWrapper.doesPathExistSync(this._artifactsDir)) {
            logUtils.log('Creating artifacts directory...');
            await fsWrapper.mkdirAsync(this._artifactsDir);
        }
    }
    /**
     * Gets contract data on network or returns if an artifact does not exist.
     * Gets the source tree hash for a file and its dependencies.
     * @param fileName Name of contract file.
     * @return Contract data on network or undefined.
     */
    private async _getContractArtifactIfExistsAsync(fileName: string): Promise<ContractArtifact | void> {
        let contractArtifact;
        const contractName = path.basename(fileName, constants.SOLIDITY_FILE_EXTENSION);
        const currentArtifactPath = `${this._artifactsDir}/${contractName}.json`;
        try {
            const opts = {
                encoding: 'utf8',
            };
            const contractArtifactString = await fsWrapper.readFileAsync(currentArtifactPath, opts);
            contractArtifact = JSON.parse(contractArtifactString);
            return contractArtifact;
        } catch (err) {
            logUtils.log(`Artifact for ${fileName} does not exist`);
            return undefined;
    private _getSourceTreeHash(fileName: string, sourceHash: Buffer, dependencies: string[]): Buffer {
        if (dependencies.length === 0) {
            return sourceHash;
        } else {
            const dependencySourceTreeHashes = _.map(dependencies, dependency => {
                const source = this._contractSources[dependency];
                this._setContractSpecificSourceData(source, dependency);
                const sourceData = this._contractSourceData[dependency];
                return this._getSourceTreeHash(dependency, sourceData.sourceHash, sourceData.dependencies);
            });
            const sourceTreeHashesBuffer = Buffer.concat([sourceHash, ...dependencySourceTreeHashes]);
            const sourceTreeHash = ethUtil.sha3(sourceTreeHashesBuffer);
            return sourceTreeHash;
        }
    }
}
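Note: the refactor above replaces the memoized `sourceTreeHashIfExists` field with an on-demand recursive computation. A standalone sketch of the idea (not the deployer's code, and it assumes an acyclic import graph and ethereumjs-util's sha3 as used in this file): a contract's source tree hash commits to its own source and, recursively, to every imported dependency, so a change anywhere in the import graph changes the hash and forces recompilation.

```typescript
// Standalone sketch of the source-tree-hash computation.
import * as ethUtil from 'ethereumjs-util';

interface SourceEntry {
    source: string;
    dependencies: string[]; // file names of direct imports
}

function getSourceTreeHash(fileName: string, sources: { [fileName: string]: SourceEntry }): Buffer {
    const entry = sources[fileName];
    const sourceHash = ethUtil.sha3(entry.source);
    if (entry.dependencies.length === 0) {
        return sourceHash;
    }
    const dependencyTreeHashes = entry.dependencies.map(dep => getSourceTreeHash(dep, sources));
    return ethUtil.sha3(Buffer.concat([sourceHash, ...dependencyTreeHashes]));
}
```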
packages/deployer/src/globals.d.ts
@@ -1,3 +1,5 @@
declare module 'dirty-chai';

// tslint:disable:completed-docs
declare module 'solc' {
    import * as Web3 from 'web3';
packages/deployer/src/utils/compiler.ts
@@ -0,0 +1,123 @@
import { logUtils } from '@0xproject/utils';
import * as _ from 'lodash';
import * as path from 'path';
import * as solc from 'solc';

import { constants } from './constants';
import { fsWrapper } from './fs_wrapper';
import { ContractArtifact, ContractSources } from './types';

/**
 * Gets contract data on network or returns if an artifact does not exist.
 * @param artifactsDir Path to the artifacts directory.
 * @param fileName Name of contract file.
 * @return Contract data on network or undefined.
 */
export async function getContractArtifactIfExistsAsync(
    artifactsDir: string,
    fileName: string,
): Promise<ContractArtifact | void> {
    let contractArtifact;
    const contractName = path.basename(fileName, constants.SOLIDITY_FILE_EXTENSION);
    const currentArtifactPath = `${artifactsDir}/${contractName}.json`;
    try {
        const opts = {
            encoding: 'utf8',
        };
        const contractArtifactString = await fsWrapper.readFileAsync(currentArtifactPath, opts);
        contractArtifact = JSON.parse(contractArtifactString);
        return contractArtifact;
    } catch (err) {
        logUtils.log(`Artifact for ${fileName} does not exist`);
        return undefined;
    }
}

/**
 * Creates the artifacts directory if it does not already exist.
 * @param artifactsDir Path to the artifacts directory.
 */
export async function createArtifactsDirIfDoesNotExistAsync(artifactsDir: string): Promise<void> {
    if (!fsWrapper.doesPathExistSync(artifactsDir)) {
        logUtils.log('Creating artifacts directory...');
        await fsWrapper.mkdirAsync(artifactsDir);
    }
}

/**
 * Searches Solidity source code for compiler version range.
 * @param source Source code of contract.
 * @return Solc compiler version range.
 */
export function parseSolidityVersionRange(source: string): string {
    const SOLIDITY_VERSION_RANGE_REGEX = /pragma\s+solidity\s+(.*);/;
    const solcVersionRangeMatch = source.match(SOLIDITY_VERSION_RANGE_REGEX);
    if (_.isNull(solcVersionRangeMatch)) {
        throw new Error('Could not find Solidity version range in source');
    }
    const solcVersionRange = solcVersionRangeMatch[1];
    return solcVersionRange;
}

/**
 * Normalizes the path found in the error message.
 * Example: converts 'base/Token.sol:6:46: Warning: Unused local variable'
 * to 'Token.sol:6:46: Warning: Unused local variable'
 * This is used to prevent logging the same error multiple times.
 * @param errMsg An error message from the compiled output.
 * @return The error message with directories truncated from the contract path.
 */
export function getNormalizedErrMsg(errMsg: string): string {
    const SOLIDITY_FILE_EXTENSION_REGEX = /(.*\.sol)/;
    const errPathMatch = errMsg.match(SOLIDITY_FILE_EXTENSION_REGEX);
    if (_.isNull(errPathMatch)) {
        throw new Error('Could not find a path in error message');
    }
    const errPath = errPathMatch[0];
    const baseContract = path.basename(errPath);
    const normalizedErrMsg = errMsg.replace(errPath, baseContract);
    return normalizedErrMsg;
}

/**
 * Parses the contract source code and extracts the dependencies
 * @param source Contract source code
 * @return List of dependencies
 */
export function parseDependencies(source: string): string[] {
    // TODO: Use a proper parser
    const IMPORT_REGEX = /(import\s)/;
    const DEPENDENCY_PATH_REGEX = /"([^"]+)"/; // Source: https://github.com/BlockChainCompany/soljitsu/blob/master/lib/shared.js
    const dependencies: string[] = [];
    const lines = source.split('\n');
    _.forEach(lines, line => {
        if (!_.isNull(line.match(IMPORT_REGEX))) {
            const dependencyMatch = line.match(DEPENDENCY_PATH_REGEX);
            if (!_.isNull(dependencyMatch)) {
                const dependencyPath = dependencyMatch[1];
                const baseName = path.basename(dependencyPath);
                dependencies.push(baseName);
            }
        }
    });
    return dependencies;
}

/**
 * Callback to resolve dependencies with `solc.compile`.
 * Throws error if contractSources not yet initialized.
 * @param contractSources Source codes of contracts.
 * @param importPath Path to an imported dependency.
 * @return Import contents object containing source code of dependency.
 */
export function findImportIfExist(contractSources: ContractSources, importPath: string): solc.ImportContents {
    const fileName = path.basename(importPath);
    const source = contractSources[fileName];
    if (_.isUndefined(source)) {
        throw new Error(`Contract source not found for ${fileName}`);
    }
    const importContents: solc.ImportContents = {
        contents: source,
    };
    return importContents;
}
packages/deployer/src/utils/error_reporter.ts
@@ -0,0 +1,18 @@
import { logUtils } from '@0xproject/utils';

/**
 * Makes an async function no-throw, printing errors to the console.
 * @param asyncFn async function to wrap
 * @return Wrapped version of the passed function
 */
export function consoleReporter<T>(asyncFn: (arg: T) => Promise<void>): (arg: T) => Promise<void> {
    const noThrowFnAsync = async (arg: T) => {
        try {
            const result = await asyncFn(arg);
            return result;
        } catch (err) {
            logUtils.log(`${err}`);
        }
    };
    return noThrowFnAsync;
}
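For reference, a rough usage sketch (assumed, not part of the diff): wrapping an async yargs handler with consoleReporter means a rejected promise is logged rather than surfacing as an unhandled rejection, which is how cli.ts now registers its compile and deploy commands. The command name and handler below are hypothetical.

```typescript
// Hypothetical usage of consoleReporter with a yargs command handler.
import * as yargs from 'yargs';
import { consoleReporter } from './utils/error_reporter';

async function onExampleCommandAsync(argv: yargs.Arguments): Promise<void> {
    throw new Error('boom'); // logged by consoleReporter instead of crashing the process
}

yargs.command('example', 'an example command', y => y, consoleReporter(onExampleCommandAsync)).help().argv;
```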
@@ -7,5 +7,6 @@ export const fsWrapper = {
    writeFileAsync: promisify<undefined>(fs.writeFile),
    mkdirAsync: promisify<undefined>(fs.mkdir),
    doesPathExistSync: fs.existsSync,
    rmdirSync: fs.rmdirSync,
    removeFileAsync: promisify<undefined>(fs.unlink),
};
@@ -20,7 +20,7 @@ export interface ContractNetworks {

export interface ContractNetworkData {
    solc_version: string;
    optimizer_enabled: number;
    optimizer_enabled: boolean;
    keccak256: string;
    source_tree_hash: string;
    abi: Web3.ContractAbi;
@@ -53,7 +53,7 @@ export interface CliOptions extends yargs.Arguments {
export interface CompilerOptions {
    contractsDir: string;
    networkId: number;
    optimizerEnabled: number;
    optimizerEnabled: boolean;
    artifactsDir: string;
    specifiedContracts: Set<string>;
}
@@ -84,27 +84,9 @@ export interface ContractSourceData {

export interface ContractSpecificSourceData {
    dependencies: string[];
    solcVersion: string;
    solcVersionRange: string;
    sourceHash: Buffer;
    sourceTreeHashIfExists?: Buffer;
}

// TODO: Consolidate with 0x.js definitions once types are moved into a separate package.
export enum ZeroExError {
    ContractDoesNotExist = 'CONTRACT_DOES_NOT_EXIST',
    ExchangeContractDoesNotExist = 'EXCHANGE_CONTRACT_DOES_NOT_EXIST',
    UnhandledError = 'UNHANDLED_ERROR',
    UserHasNoAssociatedAddress = 'USER_HAS_NO_ASSOCIATED_ADDRESSES',
    InvalidSignature = 'INVALID_SIGNATURE',
    ContractNotDeployedOnNetwork = 'CONTRACT_NOT_DEPLOYED_ON_NETWORK',
    InsufficientAllowanceForTransfer = 'INSUFFICIENT_ALLOWANCE_FOR_TRANSFER',
    InsufficientBalanceForTransfer = 'INSUFFICIENT_BALANCE_FOR_TRANSFER',
    InsufficientEthBalanceForDeposit = 'INSUFFICIENT_ETH_BALANCE_FOR_DEPOSIT',
    InsufficientWEthBalanceForWithdrawal = 'INSUFFICIENT_WETH_BALANCE_FOR_WITHDRAWAL',
    InvalidJump = 'INVALID_JUMP',
    OutOfGas = 'OUT_OF_GAS',
    NoNetworkId = 'NO_NETWORK_ID',
    SubscriptionNotFound = 'SUBSCRIPTION_NOT_FOUND',
    sourceTreeHash: Buffer;
}

export interface Token {
packages/deployer/test/compiler_test.ts
@@ -0,0 +1,47 @@
import * as chai from 'chai';
import 'mocha';

import { Compiler } from '../src/compiler';
import { fsWrapper } from '../src/utils/fs_wrapper';
import { CompilerOptions, ContractArtifact, ContractNetworkData, DoneCallback } from '../src/utils/types';

import { exchange_binary } from './fixtures/exchange_bin';
import { constants } from './util/constants';

const expect = chai.expect;

describe('#Compiler', function() {
    this.timeout(constants.timeoutMs);
    const artifactsDir = `${__dirname}/fixtures/artifacts`;
    const contractsDir = `${__dirname}/fixtures/contracts`;
    const exchangeArtifactPath = `${artifactsDir}/Exchange.json`;
    const compilerOpts: CompilerOptions = {
        artifactsDir,
        contractsDir,
        networkId: constants.networkId,
        optimizerEnabled: constants.optimizerEnabled,
        specifiedContracts: new Set(constants.specifiedContracts),
    };
    const compiler = new Compiler(compilerOpts);
    beforeEach((done: DoneCallback) => {
        (async () => {
            if (fsWrapper.doesPathExistSync(exchangeArtifactPath)) {
                await fsWrapper.removeFileAsync(exchangeArtifactPath);
            }
            await compiler.compileAsync();
            done();
        })().catch(done);
    });
    it('should create an Exchange artifact with the correct unlinked binary', async () => {
        const opts = {
            encoding: 'utf8',
        };
        const exchangeArtifactString = await fsWrapper.readFileAsync(exchangeArtifactPath, opts);
        const exchangeArtifact: ContractArtifact = JSON.parse(exchangeArtifactString);
        const exchangeContractData: ContractNetworkData = exchangeArtifact.networks[constants.networkId];
        // The last 43 bytes of the binaries are metadata which may not be equivalent
        const unlinkedBinaryWithoutMetadata = exchangeContractData.bytecode.slice(0, -86);
        const exchangeBinaryWithoutMetadata = exchange_binary.slice(0, -86);
        expect(unlinkedBinaryWithoutMetadata).to.equal(exchangeBinaryWithoutMetadata);
    });
});
packages/deployer/test/compiler_utils_test.ts
@@ -0,0 +1,74 @@
import * as chai from 'chai';
import * as dirtyChai from 'dirty-chai';
import 'mocha';

import {
    createArtifactsDirIfDoesNotExistAsync,
    getNormalizedErrMsg,
    parseDependencies,
    parseSolidityVersionRange,
} from '../src/utils/compiler';
import { fsWrapper } from '../src/utils/fs_wrapper';

chai.use(dirtyChai);
const expect = chai.expect;

describe('Compiler utils', () => {
    describe('#getNormalizedErrorMessage', () => {
        it('normalizes the error message', () => {
            const errMsg = 'base/Token.sol:6:46: Warning: Unused local variable';
            const normalizedErrMsg = getNormalizedErrMsg(errMsg);
            expect(normalizedErrMsg).to.be.equal('Token.sol:6:46: Warning: Unused local variable');
        });
    });
    describe('#createArtifactsDirIfDoesNotExistAsync', () => {
        it('creates artifacts dir', async () => {
            const artifactsDir = `${__dirname}/artifacts`;
            expect(fsWrapper.doesPathExistSync(artifactsDir)).to.be.false();
            await createArtifactsDirIfDoesNotExistAsync(artifactsDir);
            expect(fsWrapper.doesPathExistSync(artifactsDir)).to.be.true();
            fsWrapper.rmdirSync(artifactsDir);
            expect(fsWrapper.doesPathExistSync(artifactsDir)).to.be.false();
        });
    });
    describe('#parseSolidityVersionRange', () => {
        it('correctly parses the version range', () => {
            expect(parseSolidityVersionRange('pragma solidity ^0.0.1;')).to.be.equal('^0.0.1');
            expect(parseSolidityVersionRange('\npragma solidity 0.0.1;')).to.be.equal('0.0.1');
            expect(parseSolidityVersionRange('pragma solidity <=1.0.1;')).to.be.equal('<=1.0.1');
            expect(parseSolidityVersionRange('pragma solidity ~1.0.1;')).to.be.equal('~1.0.1');
        });
        // TODO: For now that doesn't work. This will work after we switch to a grammar-based parser
        it.skip('correctly parses the version range with comments', () => {
            expect(parseSolidityVersionRange('// pragma solidity ~1.0.1;\npragma solidity ~1.0.2;')).to.be.equal(
                '~1.0.2',
            );
        });
    });
    describe('#parseDependencies', () => {
        it('correctly parses Exchange dependencies', async () => {
            const exchangeSource = await fsWrapper.readFileAsync(`${__dirname}/fixtures/contracts/Exchange.sol`, {
                encoding: 'utf8',
            });
            expect(parseDependencies(exchangeSource)).to.be.deep.equal([
                'TokenTransferProxy.sol',
                'Token.sol',
                'SafeMath.sol',
            ]);
        });
        it('correctly parses TokenTransferProxy dependencies', async () => {
            const exchangeSource = await fsWrapper.readFileAsync(
                `${__dirname}/fixtures/contracts/TokenTransferProxy.sol`,
                {
                    encoding: 'utf8',
                },
            );
            expect(parseDependencies(exchangeSource)).to.be.deep.equal(['Token.sol', 'Ownable.sol']);
        });
        // TODO: For now that doesn't work. This will work after we switch to a grammar-based parser
        it.skip('correctly parses commented out dependencies', async () => {
            const contractWithCommentedOutDependencies = `// import "./TokenTransferProxy.sol";`;
            expect(parseDependencies(contractWithCommentedOutDependencies)).to.be.deep.equal([]);
        });
    });
});
@@ -10,55 +10,38 @@ import { constructor_args, exchange_binary } from './fixtures/exchange_bin';
import { constants } from './util/constants';

const expect = chai.expect;
const artifactsDir = `${__dirname}/fixtures/artifacts`;
const contractsDir = `${__dirname}/fixtures/contracts`;
const exchangeArtifactPath = `${artifactsDir}/Exchange.json`;
const compilerOpts: CompilerOptions = {
    artifactsDir,
    contractsDir,
    networkId: constants.networkId,
    optimizerEnabled: constants.optimizerEnabled,
    specifiedContracts: new Set(constants.specifiedContracts),
};
const compiler = new Compiler(compilerOpts);
const deployerOpts = {
    artifactsDir,
    networkId: constants.networkId,
    jsonrpcUrl: constants.jsonrpcUrl,
    defaults: {
        gasPrice: constants.gasPrice,
    },
};
const deployer = new Deployer(deployerOpts);

/* tslint:disable */
beforeEach(function(done: DoneCallback) {
    this.timeout(constants.timeoutMs);
    (async () => {
        if (fsWrapper.doesPathExistSync(exchangeArtifactPath)) {
            await fsWrapper.removeFileAsync(exchangeArtifactPath);
        }
        await compiler.compileAllAsync();
        done();
    })().catch(done);
});
/* tslint:enable */

describe('#Compiler', () => {
    it('should create an Exchange artifact with the correct unlinked binary', async () => {
        const opts = {
            encoding: 'utf8',
        };
        const exchangeArtifactString = await fsWrapper.readFileAsync(exchangeArtifactPath, opts);
        const exchangeArtifact: ContractArtifact = JSON.parse(exchangeArtifactString);
        const exchangeContractData: ContractNetworkData = exchangeArtifact.networks[constants.networkId];
        // The last 43 bytes of the binaries are metadata which may not be equivalent
        const unlinkedBinaryWithoutMetadata = exchangeContractData.bytecode.slice(0, -86);
        const exchangeBinaryWithoutMetadata = exchange_binary.slice(0, -86);
        expect(unlinkedBinaryWithoutMetadata).to.equal(exchangeBinaryWithoutMetadata);
    });
});
describe('#Deployer', () => {
    const artifactsDir = `${__dirname}/fixtures/artifacts`;
    const contractsDir = `${__dirname}/fixtures/contracts`;
    const exchangeArtifactPath = `${artifactsDir}/Exchange.json`;
    const compilerOpts: CompilerOptions = {
        artifactsDir,
        contractsDir,
        networkId: constants.networkId,
        optimizerEnabled: constants.optimizerEnabled,
        specifiedContracts: new Set(constants.specifiedContracts),
    };
    const compiler = new Compiler(compilerOpts);
    const deployerOpts = {
        artifactsDir,
        networkId: constants.networkId,
        jsonrpcUrl: constants.jsonrpcUrl,
        defaults: {
            gasPrice: constants.gasPrice,
        },
    };
    const deployer = new Deployer(deployerOpts);
    beforeEach(function(done: DoneCallback) {
        this.timeout(constants.timeoutMs);
        (async () => {
            if (fsWrapper.doesPathExistSync(exchangeArtifactPath)) {
                await fsWrapper.removeFileAsync(exchangeArtifactPath);
            }
            await compiler.compileAsync();
            done();
        })().catch(done);
    });
    describe('#deployAsync', () => {
        it('should deploy the Exchange contract without updating the Exchange artifact', async () => {
            const exchangeConstructorArgs = [constants.zrxTokenAddress, constants.tokenTransferProxyAddress];
@@ -3,7 +3,7 @@ import { BigNumber } from '@0xproject/utils';
export const constants = {
    networkId: 0,
    jsonrpcUrl: 'http://localhost:8545',
    optimizerEnabled: 0,
    optimizerEnabled: false,
    gasPrice: new BigNumber(20000000000),
    timeoutMs: 20000,
    zrxTokenAddress: '0xe41d2489571d322189246dafa5ebde1f4699f498',
@@ -2,8 +2,7 @@
    "extends": "../../tsconfig",
    "compilerOptions": {
        "outDir": "lib",
        "strictFunctionTypes": false,
        "strictNullChecks": false
        "strictFunctionTypes": false
    },
    "include": [
        "./src/**/*",
@@ -324,6 +324,10 @@
  version "1.2.0"
  resolved "https://registry.yarnpkg.com/@types/require-from-string/-/require-from-string-1.2.0.tgz#c18cfc8a2c1a0259e5841d1fef2b5e9d01c64242"

"@types/semver@^5.5.0":
  version "5.5.0"
  resolved "https://registry.yarnpkg.com/@types/semver/-/semver-5.5.0.tgz#146c2a29ee7d3bae4bf2fcb274636e264c813c45"

"@types/serve-static@*":
  version "1.13.1"
  resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.1.tgz#1d2801fa635d274cd97d4ec07e26b21b44127492"