Merge branch 'development' into feat/contracts/split-packages
@@ -16,6 +16,15 @@
            }
        ]
    },
    {
        "version": "2.0.1",
        "changes": [
            {
                "note": "Fix imports in `TestConstants` and `TestLibBytes` to be relative. This way they show up correctly in coverage reports",
                "pr": 1535
            }
        ]
    },
    {
        "version": "2.0.0",
        "changes": [
packages/devnet/docker-compose.yml (new file)
@@ -0,0 +1,8 @@
version: '3'
services:
    devnet:
        image: 0x-devnet:latest
        build:
            context: .
        ports:
            - 8501:8501
@@ -15,6 +15,7 @@ mkdir -p /var/log
    --rpc \
    --rpcaddr '0.0.0.0' \
    --rpcport 8501 \
    --rpcvhosts '*' \
    --rpcapi 'personal,db,eth,net,web3,txpool,miner,debug' \
    --networkid 50 \
    --gasprice '2000000000' \
@@ -50,6 +50,8 @@ export class CoverageSubprovider extends TraceInfoSubprovider {
    }
}

const IGNORE_REGEXP = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;

/**
 * Computes partial coverage for a single file & subtrace.
 * @param contractData Contract metadata (source, srcMap, bytecode)
@@ -65,7 +67,7 @@ export const coverageHandler: SingleFileSubtraceHandler = (
    fileIndex: number,
): Coverage => {
    const absoluteFileName = contractData.sources[fileIndex];
    const coverageEntriesDescription = collectCoverageEntries(contractData.sourceCodes[fileIndex]);
    const coverageEntriesDescription = collectCoverageEntries(contractData.sourceCodes[fileIndex], IGNORE_REGEXP);

    // if the source wasn't provided for the fileIndex, we can't cover the file
    if (_.isUndefined(coverageEntriesDescription)) {
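Side note on the ignore marker: the new IGNORE_REGEXP matches a `/* solcov ignore next */` comment placed directly above a statement. Below is a minimal, self-contained sketch of how that matching works (illustrative only, not part of this commit; the contract and function names are made up), mirroring the exec loop in gatherRangesToIgnore further down.

// Illustrative sketch: how IGNORE_REGEXP finds ignored ranges (names are examples only).
const IGNORE_REGEXP = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;

// A tiny Solidity source with one statement marked to be ignored by coverage.
const sampleSource = `
contract Sample {
    function alwaysRevert() public pure {
        /* solcov ignore next */
        revert();
    }
}
`;

// Collect the start offsets of the code that follows each ignore marker.
const ignoreRangesBeginningAt: number[] = [];
let match;
do {
    match = IGNORE_REGEXP.exec(sampleSource);
    if (match) {
        // The ignored range starts right after the marker and any trailing whitespace.
        ignoreRangesBeginningAt.push(match.index + match[0].length);
    }
} while (match);
console.log(ignoreRangesBeginningAt); // one entry: the offset at which `revert();` begins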
@@ -1,4 +1,13 @@
[
    {
        "version": "2.0.1",
        "changes": [
            {
                "note": "Fix a bug where some parts of the profiling report were missing because of the coverage ignore lines",
                "pr": 1535
            }
        ]
    },
    {
        "version": "2.0.0",
        "changes": [
@@ -1,4 +1,25 @@
[
    {
        "version": "6.0.0",
        "changes": [
            {
                "note": "`SolCompilerArtifactAdapter` now uses `SolResolver` under the hood, which allows it to resolve `NPM` dependencies properly",
                "pr": 1535
            },
            {
                "note": "Cache the results of `utils.getContractDataIfExists`, leading to faster execution",
                "pr": 1535
            },
            {
                "note": "`SolCompilerArtifactAdapter` no longer returns `ContractData` for interfaces",
                "pr": 1535
            },
            {
                "note": "Print a reasonable error message on bytecode collision",
                "pr": 1535
            }
        ]
    },
    {
        "version": "5.0.0",
        "changes": [
@@ -44,6 +44,7 @@
    "dependencies": {
        "@0x/dev-utils": "^1.0.24",
        "@0x/sol-compiler": "^2.0.2",
        "@0x/sol-resolver": "^1.2.3",
        "@0x/subproviders": "^2.1.11",
        "@0x/typescript-typings": "^3.0.8",
        "@0x/utils": "^3.0.1",
@@ -1,3 +1,4 @@
import { FallthroughResolver, FSResolver, NPMResolver, RelativeFSResolver, URLResolver } from '@0x/sol-resolver';
import { logUtils } from '@0x/utils';
import { CompilerOptions, ContractArtifact } from 'ethereum-types';
import * as fs from 'fs';
@@ -14,6 +15,7 @@ const CONFIG_FILE = 'compiler.json';
export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
    private readonly _artifactsPath: string;
    private readonly _sourcesPath: string;
    private readonly _resolver: FallthroughResolver;
    /**
     * Instantiates a SolCompilerArtifactAdapter
     * @param artifactsPath Path to your artifacts directory
@@ -32,6 +34,12 @@ export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
            throw new Error(`contractsDir not found in ${CONFIG_FILE}`);
        }
        this._sourcesPath = (sourcesPath || config.contractsDir) as string;
        this._resolver = new FallthroughResolver();
        this._resolver.appendResolver(new URLResolver());
        const packagePath = path.resolve('');
        this._resolver.appendResolver(new NPMResolver(packagePath));
        this._resolver.appendResolver(new RelativeFSResolver(this._sourcesPath));
        this._resolver.appendResolver(new FSResolver());
    }
    public async collectContractsDataAsync(): Promise<ContractData[]> {
        const artifactsGlob = `${this._artifactsPath}/**/*.json`;
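To make the resolution order explicit: the constructor now builds a fall-through chain that tries URL, then NPM, then relative-filesystem, then plain filesystem resolution. Below is a small self-contained sketch of that pattern (an illustration of the idea only, not the actual `@0x/sol-resolver` implementation; the interface and class names are simplified assumptions).

// Simplified sketch of the fall-through resolution pattern (not the real @0x/sol-resolver code).
interface ContractSource {
    absolutePath: string;
    source: string;
}

interface Resolver {
    // Returns the resolved source, or undefined if this resolver cannot handle the import path.
    resolveIfExists(importPath: string): ContractSource | undefined;
}

class SimpleFallthroughResolver {
    private readonly _resolvers: Resolver[] = [];
    public appendResolver(resolver: Resolver): void {
        this._resolvers.push(resolver);
    }
    public resolve(importPath: string): ContractSource {
        // Try each resolver in the order it was appended and return the first hit.
        for (const resolver of this._resolvers) {
            const resolvedIfExists = resolver.resolveIfExists(importPath);
            if (resolvedIfExists !== undefined) {
                return resolvedIfExists;
            }
        }
        throw new Error(`Failed to resolve ${importPath}`);
    }
}

With the resolvers appended in that order, an import such as `@0x/contracts-utils/...` falls through the URL resolver and is picked up by the NPM resolver, which is what lets the artifact adapter resolve `NPM` dependencies as described in the CHANGELOG entry above.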
@@ -46,10 +54,9 @@ export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
            const sources: Sources = {};
            const sourceCodes: SourceCodes = {};
            _.map(artifact.sources, (value: { id: number }, relativeFilePath: string) => {
                const filePath = path.resolve(this._sourcesPath, relativeFilePath);
                const fileContent = fs.readFileSync(filePath).toString();
                sources[value.id] = filePath;
                sourceCodes[value.id] = fileContent;
                const source = this._resolver.resolve(relativeFilePath);
                sources[value.id] = source.absolutePath;
                sourceCodes[value.id] = source.source;
            });
            const contractData = {
                sourceCodes,
@@ -59,6 +66,10 @@ export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
                runtimeBytecode: artifact.compilerOutput.evm.deployedBytecode.object,
                sourceMapRuntime: artifact.compilerOutput.evm.deployedBytecode.sourceMap,
            };
            const isInterfaceContract = contractData.bytecode === '0x' && contractData.runtimeBytecode === '0x';
            if (isInterfaceContract) {
                continue;
            }
            contractsData.push(contractData);
        }
        return contractsData;
@@ -5,18 +5,18 @@ import * as parser from 'solidity-parser-antlr';
import { ASTVisitor, CoverageEntriesDescription } from './ast_visitor';
import { getOffsetToLocation } from './source_maps';

const IGNORE_RE = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;

// Parsing source code for each transaction/code is slow and therefore we cache it
const sourceHashToCoverageEntries: { [sourceHash: string]: CoverageEntriesDescription } = {};

export const collectCoverageEntries = (contractSource: string) => {
export const collectCoverageEntries = (contractSource: string, ignoreRegexp?: RegExp) => {
    const sourceHash = ethUtil.sha3(contractSource).toString('hex');
    if (_.isUndefined(sourceHashToCoverageEntries[sourceHash]) && !_.isUndefined(contractSource)) {
        const ast = parser.parse(contractSource, { range: true });
        const offsetToLocation = getOffsetToLocation(contractSource);
        const ignoreRangesBegingingAt = gatherRangesToIgnore(contractSource);
        const visitor = new ASTVisitor(offsetToLocation, ignoreRangesBegingingAt);
        const ignoreRangesBeginningAt = _.isUndefined(ignoreRegexp)
            ? []
            : gatherRangesToIgnore(contractSource, ignoreRegexp);
        const visitor = new ASTVisitor(offsetToLocation, ignoreRangesBeginningAt);
        parser.visit(ast, visitor);
        sourceHashToCoverageEntries[sourceHash] = visitor.getCollectedCoverageEntries();
    }
@@ -25,12 +25,12 @@ export const collectCoverageEntries = (contractSource: string) => {
};

// Gather the start index of all code blocks preceded by "/* solcov ignore next */"
function gatherRangesToIgnore(contractSource: string): number[] {
function gatherRangesToIgnore(contractSource: string, ignoreRegexp: RegExp): number[] {
    const ignoreRangesStart = [];

    let match;
    do {
        match = IGNORE_RE.exec(contractSource);
        match = ignoreRegexp.exec(contractSource);
        if (match) {
            const matchLen = match[0].length;
            ignoreRangesStart.push(match.index + matchLen);
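A short usage sketch of the new optional parameter (illustrative only; the import path and variable names are examples, not taken from this diff): callers that should not honour ignore markers simply omit the second argument and get an empty ignore list, while the coverage subprovider passes its own regexp. This lines up with the CHANGELOG entry above about parts of the profiling report going missing because of the ignore lines.

// Illustrative usage of the new optional ignoreRegexp parameter (names/paths are examples only).
import { collectCoverageEntries } from './collect_coverage_entries';

const IGNORE_REGEXP = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;
const coverageTargetSource = 'contract A { function f() public {} }'; // placeholder Solidity source
const profiledSource = 'contract B { function g() public {} }'; // placeholder Solidity source

// Coverage collection: honour the `/* solcov ignore next */` markers.
const coverageEntries = collectCoverageEntries(coverageTargetSource, IGNORE_REGEXP);

// Profiling / revert traces: omit the regexp so no statements are dropped from those reports.
const profilerEntries = collectCoverageEntries(profiledSource);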
@@ -1,15 +1,14 @@
import { addressUtils, BigNumber } from '@0x/utils';
import { addressUtils, BigNumber, logUtils } from '@0x/utils';
import { OpCode, StructLog } from 'ethereum-types';
import { addHexPrefix } from 'ethereumjs-util';
import * as _ from 'lodash';

import { ContractData, LineColumn, SingleFileSourceRange } from './types';

// This is the minimum length of valid contract bytecode. The Solidity compiler
// metadata is 86 bytes. If you add the '0x' prefix, we get 88.
const MIN_CONTRACT_BYTECODE_LENGTH = 88;
const STATICCALL_GAS_COST = 40;

const bytecodeToContractDataIfExists: { [bytecode: string]: ContractData | undefined } = {};

export const utils = {
    compareLineColumn(lhs: LineColumn, rhs: LineColumn): number {
        return lhs.line !== rhs.line ? lhs.line - rhs.line : lhs.column - rhs.column;
@@ -47,22 +46,29 @@ export const utils = {
        if (!bytecode.startsWith('0x')) {
            throw new Error(`0x hex prefix missing: ${bytecode}`);
        }
        const contractData = _.find(contractsData, contractDataCandidate => {
        // HACK(leo): We want to cache the values that are possibly undefined.
        // That's why we can't check for undefined as we usually do, but need to use `hasOwnProperty`.
        if (bytecodeToContractDataIfExists.hasOwnProperty(bytecode)) {
            return bytecodeToContractDataIfExists[bytecode];
        }
        const contractDataCandidates = _.filter(contractsData, contractDataCandidate => {
            const bytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.bytecode);
            // If the bytecode is less than the minimum length, we are probably
            // dealing with an interface. This isn't what we're looking for.
            if (bytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) {
                return false;
            }
            const runtimeBytecodeRegex = utils.bytecodeToBytecodeRegex(contractDataCandidate.runtimeBytecode);
            if (runtimeBytecodeRegex.length < MIN_CONTRACT_BYTECODE_LENGTH) {
                return false;
            }
            // We use this function to find a contract by either bytecode or runtimeBytecode. Those are
            // quasi-random strings, so collisions are practically impossible, and it allows us to reuse this code.
            return !_.isNull(bytecode.match(bytecodeRegex)) || !_.isNull(bytecode.match(runtimeBytecodeRegex));
        });
        return contractData;
        if (contractDataCandidates.length > 1) {
            const candidates = contractDataCandidates.map(
                contractDataCandidate => _.values(contractDataCandidate.sources)[0],
            );
            const errMsg =
                "We've found more than one artifact that contains the exact same bytecode and therefore are unable to detect which contract was executed. " +
                "We'll be assigning all traces to the first one.";
            logUtils.warn(errMsg);
            logUtils.warn(candidates);
        }
        return (bytecodeToContractDataIfExists[bytecode] = contractDataCandidates[0]);
    },
    isCallLike(op: OpCode): boolean {
        return _.includes([OpCode.CallCode, OpCode.StaticCall, OpCode.Call, OpCode.DelegateCall], op);
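The caching added above is easy to get wrong because the cached value itself may legitimately be undefined, which is why presence is checked with `hasOwnProperty`. Below is a small generic sketch of the same memoization pattern (illustrative only, not from this diff; the helper name and usage are hypothetical).

// Illustrative sketch of the memoization pattern used by getContractDataIfExists above:
// the computed value may itself be undefined, so presence in the cache is tracked with
// hasOwnProperty rather than by comparing the stored value against undefined.
function memoizePossiblyUndefined<T>(compute: (key: string) => T | undefined): (key: string) => T | undefined {
    const cache: { [key: string]: T | undefined } = {};
    return (key: string): T | undefined => {
        if (cache.hasOwnProperty(key)) {
            return cache[key]; // may be undefined, and that is still a valid cached result
        }
        return (cache[key] = compute(key));
    };
}

// Hypothetical usage: the expensive artifact matching runs at most once per distinct bytecode string.
const findContractDataCached = memoizePossiblyUndefined((bytecode: string) => {
    // ...expensive regex matching over all contract artifacts, as in getContractDataIfExists above...
    return undefined; // placeholder
});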
@@ -130,7 +130,8 @@ describe('Collect coverage entries', () => {
            solcovIgnoreContractBaseName,
        );
        const solcovIgnoreContract = fs.readFileSync(solcovIgnoreContractFileName).toString();
        const coverageEntries = collectCoverageEntries(solcovIgnoreContract);
        const IGNORE_REGEXP = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;
        const coverageEntries = collectCoverageEntries(solcovIgnoreContract, IGNORE_REGEXP);
        const fnIds = _.keys(coverageEntries.fnMap);

        expect(fnIds.length).to.be.equal(1);