Segregate indexes by environment

fabioberger
2019-08-26 16:25:40 +02:00
parent a710ebe5b3
commit 361f5ca5cc
20 changed files with 504 additions and 89 deletions

View File

@@ -70,7 +70,7 @@ export ALGOLIA_ADMIN_API_KEY={YOUR_ADMIN_API_KEY}
yarn index_docs
```
The above script will index all the docs found in the `/mdx` folder on [Algolia](https://www.algolia.com/). It's possible to pass in arguments that match the directory names to index only those document types, e.g. `yarn index_docs tools core-concepts` will index tools and core concepts.
The above script will index all the docs found in the `/mdx` folder on [Algolia](https://www.algolia.com/). It's possible to pass in arguments that match the directory names to index only those document types, e.g. `yarn index_docs tools coreConcepts` will index tools and core concepts.
Running the script updates some of the meta information about the files (relative paths to files and versions of the doc). For other types of information (e.g. title, subtitle, tags...) you will have to update them yourself.

View File

@@ -15,9 +15,9 @@
"fix": "tslint --fix --format stylish --project . 'ts/**/*.ts' 'ts/**/*.tsx'",
"update:tools": "aws s3 sync s3://docs-markdown/ mdx/tools/",
"dev": "npm run update:tools; node --max-old-space-size=8192 ../../node_modules/webpack-dev-server/bin/webpack-dev-server.js --mode development --content-base public --https",
"deploy_dogfood": "npm run update:tools; npm run index_docs; npm run build:prod; aws s3 sync ./public/. s3://dogfood.0xproject.com --profile 0xproject --region us-east-1 --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers",
"deploy_staging": "npm run update:tools; npm run index_docs; npm run build:prod; aws s3 sync ./public/. s3://staging-0xproject --profile 0xproject --region us-east-1 --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers",
"deploy_live": "npm run update:tools; npm run index_docs; DEPLOY_ROLLBAR_SOURCEMAPS=true npm run build:prod; aws s3 sync ./public/. s3://0x.org --profile 0xproject --region us-east-1 --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers --exclude *.map.js",
"deploy_dogfood": "npm run update:tools; npm run index_docs --environment dogfood; npm run build:prod; aws s3 sync ./public/. s3://dogfood.0xproject.com --profile 0xproject --region us-east-1 --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers",
"deploy_staging": "npm run update:tools; npm run index_docs --environment staging; npm run build:prod; aws s3 sync ./public/. s3://staging-0xproject --profile 0xproject --region us-east-1 --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers",
"deploy_live": "npm run update:tools; npm run index_docs --environment production; DEPLOY_ROLLBAR_SOURCEMAPS=true npm run build:prod; aws s3 sync ./public/. s3://0x.org --profile 0xproject --region us-east-1 --grants read=uri=http://acs.amazonaws.com/groups/global/AllUsers --exclude *.map.js",
"index_docs": "TS_NODE_PROJECT=./tsconfig-indexing.json node --stack-size=16000 -r ts-node/register scripts/algolia_index.ts"
},
"author": "Fabio Berger",
@@ -120,6 +120,7 @@
"@types/styled-components": "4.1.1",
"@types/valid-url": "^1.0.2",
"@types/web3-provider-engine": "^14.0.0",
"@types/yargs": "^11.0.0",
"awesome-typescript-loader": "^5.2.1",
"babel-loader": "^8.0.6",
"cache-loader": "^4.1.0",
@@ -152,6 +153,7 @@
"webpack": "^4.39.2",
"webpack-bundle-analyzer": "^3.4.1",
"webpack-cli": "3.3.7",
"webpack-dev-server": "^3.8.0"
"webpack-dev-server": "^3.8.0",
"yargs": "^10.0.3"
}
}

View File

@@ -0,0 +1,13 @@
const algoliasearch = require('algoliasearch/lite');
import { ALGOLIA_APP_ID } from '../ts/utils/algolia_constants';
const ALGOLIA_ADMIN_API_KEY = process.env.ALGOLIA_ADMIN_API_KEY;
const ALGOLIA_ADMIN_OPTIONS = {
timeouts: {
connect: 10000,
read: 2 * 10000,
write: 30 * 10000,
},
};
export const adminClient = algoliasearch(ALGOLIA_APP_ID, ALGOLIA_ADMIN_API_KEY, ALGOLIA_ADMIN_OPTIONS);

View File

@@ -0,0 +1,341 @@
import { ObjectMap } from '@0x/types';
import * as compareVersions from 'compare-versions';
import * as fs from 'fs';
import * as glob from 'glob';
import * as path from 'path';
import slugify from 'slugify';
import { getNameToSearchIndex } from '../ts/utils/algolia_constants';
import { adminClient } from './algolia_admin_client';
// Note (piotr): can't find type definitions for these
const stringify = require('json-stringify-pretty-compact');
const remark = require('remark');
const mdx = require('remark-mdx');
const slug = require('remark-slug');
const { read } = require('to-vfile');
const findAfter = require('unist-util-find-after');
const modifyChildren = require('unist-util-modify-children');
const { selectAll } = require('unist-util-select');
const meta = require('../ts/utils/algolia_meta.json');
export interface IAlgoliaSettings {
distinct: boolean;
attributeForDistinct: string;
attributesForFaceting: string[];
attributesToSnippet: string[];
searchableAttributes: string[];
snippetEllipsisText: string;
}
const sharedSettings = {
distinct: true,
attributeForDistinct: 'id',
attributesForFaceting: [''],
attributesToSnippet: ['description:20', 'textContent:20'], // attribute:nbWords (number of words to show in a snippet)
searchableAttributes: ['title', 'textContent'],
snippetEllipsisText: '…',
};
const settings: ObjectMap<IAlgoliaSettings> = {
apiExplorer: sharedSettings,
coreConcepts: sharedSettings,
guides: {
...sharedSettings,
attributesForFaceting: ['topics', 'difficulty'],
},
tools: {
...sharedSettings,
attributesForFaceting: ['type', 'tags', 'difficulty', 'isCommunity'],
},
};
export async function indexFilesAsync(indexName: string, environment: string): Promise<void> {
const nameToFile = getNameToFile(indexName); // Get file objects processed to get their meta information (name, path, versions, etc.)
const nameToSearchIndex = getNameToSearchIndex(environment);
const algoliaIndex = adminClient.initIndex(nameToSearchIndex[indexName]);
const algoliaSettings = settings[indexName];
await clearIndexAsync(algoliaIndex);
await setIndexSettingsAsync(algoliaIndex, algoliaSettings);
for (const name of Object.keys(meta[indexName])) {
const metadata = meta[indexName][name];
const file = nameToFile[name];
const isMDX = file !== undefined && file.path !== undefined;
if (isMDX) {
updateMetaFile(file, indexName); // Update the meta file shared between algolia and the page rendering the mdx content on the client
await processMdxAsync(algoliaIndex, file, indexName);
} else {
const titleSlug = slugify(metadata.title, { lower: true });
const content = {
...metadata,
externalUrl: metadata.externalUrl,
id: titleSlug,
objectID: titleSlug,
};
await pushObjectsToAlgoliaAsync(algoliaIndex, [content]);
}
}
}
function getNameToFile(dirName: string): ObjectMap<File> {
const dirPath = path.join(__dirname, `../../mdx/${dirName}`);
const paths = glob.sync(`${dirPath}/**/*.mdx`);
const nameToFile: ObjectMap<File> = {};
for (const p of paths) {
if (dirName === 'tools') {
const name = path.basename(path.join(p, '../../'));
const version = path.basename(path.dirname(p));
const url = `/docs/tools/${name}/${version}`;
const fileIfExists = nameToFile[name];
const fileObject = { name, path: p, version, versions: [version], url };
if (fileIfExists !== undefined) {
if (compareVersions.compare(version, fileIfExists.version, '>')) {
const versions = [...fileIfExists.versions, version]; // Add current version to versions array
nameToFile[name] = { ...fileObject, versions };
}
} else {
nameToFile[name] = fileObject;
}
}
if (dirName === 'guides') {
const { name } = path.parse(p);
const url = `/docs/guides/${name}`;
nameToFile[name] = { name, path: p, url };
}
if (dirName === 'coreConcepts' || dirName === 'apiExplorer') {
const url = `/docs/${dirName}`;
nameToFile[dirName] = { name: dirName, path: p, url };
}
}
return nameToFile;
}
function updateMetaFile(file: File, indexName: string): void {
const [_, relativePath] = file.path.split('mdx/');
meta[indexName][file.name].path = relativePath;
if (file.versions) {
const versionsSortedDesc = file.versions.sort(compareVersions).reverse();
meta[indexName][file.name].versions = versionsSortedDesc;
}
fs.writeFileSync(path.join(__dirname, 'algolia_meta.json'), stringify(meta, { replacer: null, indent: 4 }));
}
async function processMdxAsync(algoliaIndex: any, file: File, indexName: string): Promise<void> {
const content = await read(file.path);
await remark()
.use(slug) // slugify heading text as ids
.use(mdx)
.use(() => async (tree: Node[]) => {
await processContentTreeAsync(tree, file, algoliaIndex, indexName);
})
.process(content);
}
async function processContentTreeAsync(tree: Node[], file: File, algoliaIndex: any, indexName: string): Promise<void> {
const modify = modifyChildren(modifier);
// We first modify the tree to get slugified ids from headings to all text nodes
// This is done to be able to link to a certain section in a doc after clicking a search suggestion
modify(tree);
// Get all text nodes. I.e. 'heading', 'paragraph', 'list' all can have (nested) child text nodes
const textNodes = selectAll('text', tree);
if (textNodes) {
// Combines text nodes that exist on the same line. I.e. if a paragraph
// contains 7 text nodes it combines them into 1. This makes text snippets
// in algolia more descriptive.
const formattedTextNodes = formatTextNodes(textNodes);
// Adds meta and formats information on all formatted text nodes
const content = getContent(file, formattedTextNodes, indexName);
await pushObjectsToAlgoliaAsync(algoliaIndex, content);
}
}
function modifier(node: Node, index: number, parent: Node): void {
if (node.type === 'heading') {
const start = node;
const isEnd = (node: Node) => node.type === 'heading' && node.depth <= start.depth;
const end = findAfter(parent, start, isEnd);
const startIndex = parent.children.indexOf(start);
const endIndex = parent.children.indexOf(end);
// Find all nodes between and including the heading and all nodes before the next heading
const between = parent.children.slice(startIndex, endIndex > 0 ? endIndex : undefined);
// We add the id of the heading as hash part of the url to all text nodes
for (const item of between) {
addHashToChildren(item, start);
}
}
}
function addHashToChildren(item: Node, start: Node): void {
if (item.children) {
for (const child of item.children) {
if (child.type === 'text') {
child.data = child.data || {};
child.data.hash = `#${start.data.id}`;
}
addHashToChildren(child, start);
}
}
}
async function setIndexSettingsAsync(algoliaIndex: any, algoliaSettings: IAlgoliaSettings): Promise<void> {
await algoliaIndex.setSettings(algoliaSettings, (err: string) => {
if (err) {
throw Error(`Error setting index settings: ${err}`);
}
});
}
async function pushObjectsToAlgoliaAsync(algoliaIndex: any, content: Content[]): Promise<void> {
await algoliaIndex
.saveObjects(content)
.then(({ objectIDs }: { objectIDs: string[] }) =>
console.log(
`✨ Pushed content to Algolia with Object IDs ${objectIDs[0]} to ${objectIDs[objectIDs.length - 1]}`,
),
)
.catch((err: string) => {
throw Error(`Error pushing objects to Algolia: ${err}`);
});
}
async function clearIndexAsync(algoliaIndex: any): Promise<void> {
await algoliaIndex.clearIndex((err: string, content: any) => {
if (err) {
throw Error(`Error clearing Algolia index: ${err}`);
}
});
}
function getContent(file: File, formattedTextNodes: FormattedNode[], indexName: string): Content[] {
const { name, url } = file;
const metaData: Meta = meta[indexName][name];
const content: Content[] = [];
formattedTextNodes.forEach((node: FormattedNode, index: number) => {
const titleSlug = slugify(metaData.title, { lower: true });
content.push({
...metaData,
url,
urlWithHash: url + node.hash,
hash: node.hash,
textContent: node.textContent,
id: titleSlug,
objectID: `${titleSlug}_${index}`,
});
});
return content;
}
function formatTextNodes(textNodes: Node[]): FormattedNode[] {
const formattedTextNodes: FormattedNode[] = []; // array structure: [ { line: [LINE_NUMBER], textContent: [MERGED_TEXT_VALUE] } ]
textNodes.map((textNode: Node) => {
const { data, position, value } = textNode;
// If data (hash) is not present on the node it means that the text node occurs before any headings. I.e. in an intro text without a heading.
const hash = data ? data.hash : '';
const { line } = position.start; // Line at which textnode starts (and for paragraphs, headings, ends).
const nodeIndex = formattedTextNodes.findIndex((node: FormattedNode) => node.line === line);
const isIndexPresent = nodeIndex > -1;
if (isIndexPresent) {
formattedTextNodes[nodeIndex].textContent += value; // Merge value with existing text at the given line
} else {
formattedTextNodes.push({ line, hash, textContent: value }); // Create text, hash part of the url, and its start line
}
});
return formattedTextNodes;
}
interface File {
name: string;
path: string;
version?: string;
versions?: string[];
url: string;
}
interface Meta {
description: string;
title: string;
subtitle?: string;
difficulty?: 'Beginner' | 'Intermediate' | 'Advanced';
isCommunity?: boolean;
isFeatured?: boolean;
keywords?: string;
tags?: string[];
topics?: string[];
type?: string;
}
interface Content extends Meta {
url: string;
urlWithHash: string;
hash: string;
textContent: string;
id: string;
objectID: string;
}
interface FormattedNode {
hash: string;
line: number;
textContent: string;
}
// Syntactic units in unist syntax trees are called nodes.
interface Node {
type: string;
children?: Node[];
data?: Data;
depth?: number;
lang?: string;
ordered?: boolean;
position?: Position;
spread?: boolean;
value?: string;
}
// Location of a node in a source file.
interface Position {
start: Point; // Place of the first character of the parsed source region.
end: Point; // Place of the first character after the parsed source region.
indent: number[]; // Start column at each index (plus start line) in the source region
}
// One place in a source file.
interface Point {
line: number; // Line in a source file (1-indexed integer).
column: number; // Column in a source file (1-indexed integer).
offset: number; // Character in a source file (0-indexed integer).
}
// Information associated by the ecosystem with the node.
// Space is guaranteed to never be specified by unist or specifications
// implementing unist.
interface Data {
[key: string]: any;
}
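For reference, here is a sketch (not part of this commit) of the kind of record `getContent` produces for a single text node. The values below are made up, and the `Content` interface is the one declared in the file above; the point is that every record for a given doc shares the same `id` (used as `attributeForDistinct`), while `objectID` stays unique per text node.
```
// Illustrative only — hypothetical values for one text node of a guide.
const exampleRecord: Content = {
    description: 'An example guide description',
    title: 'Example Guide',
    url: '/docs/guides/example-guide',
    urlWithHash: '/docs/guides/example-guide#some-heading',
    hash: '#some-heading',
    textContent: 'The merged text of all text nodes that start on this line…',
    id: 'example-guide', // slugified title, shared by all records of this doc
    objectID: 'example-guide_3', // slugified title + text node index, unique per record
};
```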

View File

@@ -0,0 +1,36 @@
import * as yargs from 'yargs';
import { getNameToSearchIndex } from '../ts/utils/algolia_constants';
import { indexFilesAsync } from './algolia_helpers';
const args = yargs
.option('indexes', {
alias: ['i'],
describe: 'A comma-separated list of specific indexes one wants to sync',
type: 'string',
normalize: true,
demandOption: false,
default: undefined,
})
.option('environment', {
alias: ['e', 'env'],
describe: 'The environment for which you wish to update the indexes',
type: 'string',
normalize: true,
demandOption: false,
default: 'development',
})
.example("$0 --environment 'production' --indexes 'tools'", 'Full usage example').argv;
function processIndices(indices: string[], environment: string): void {
for (const indexName of indices) {
void indexFilesAsync(indexName, environment);
}
}
if (args.indexes !== undefined) {
processIndices(args.indexes.split(','), args.environment); // Use args given to process and push to algolia
} else {
processIndices(Object.keys(getNameToSearchIndex(args.environment)), args.environment); // Process and push all indices
}
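The script fires off one `indexFilesAsync` call per requested index. A minimal sketch (not part of the commit) of calling the helper directly from another script, using the same `indexName`/`environment` arguments the CLI forwards:
```
import { indexFilesAsync } from './algolia_helpers';

// Hypothetical one-off reindex: push only the guides docs to the staging indices,
// awaiting completion so a failure surfaces as a non-zero exit code.
indexFilesAsync('guides', 'staging')
    .then(() => console.log('Done indexing guides for staging'))
    .catch(err => {
        console.error(err);
        process.exit(1);
    });
```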

View File

@@ -10,7 +10,8 @@ import { AutocompleteOverlay } from 'ts/components/docs/search/autocomplete_over
import { AutocompleteWrapper } from 'ts/components/docs/search/autocomplete_wrapper';
import { docs } from 'ts/style/docs';
import { searchIndices } from 'ts/utils/algolia_constants';
import { getNameToSearchIndex } from 'ts/utils/algolia_constants';
import { environments } from 'ts/utils/environments';
export interface IHit {
description: string;
@@ -108,11 +109,8 @@ const CustomAutoComplete: React.FC<IAutoCompleteProps> = ({
};
const renderSectionTitle = (section: any): React.ReactNode => {
const { tools, guides } = searchIndices;
const coreConcepts = searchIndices['core-concepts'];
// TODO: Add this back in when api - explorer page is ready
// to be indexed and included in the search results (ditto in search_input.tsx)
// const apiExplorer = searchIndices['api-explorer'];
// TODO(fabio): Add `apiExplorer` below once the API Explore page is ready (ditto in search_input.tsx)
const { tools, guides, coreConcepts } = getNameToSearchIndex(environments.getEnvironment());
const titles: { [key: string]: string } = {
// TODO: Add this back in when api - explorer page is ready

View File

@@ -3,24 +3,28 @@ import { Configure, Index, InstantSearch } from 'react-instantsearch-dom';
import { AutoComplete } from 'ts/components/docs/search/autocomplete';
import { hitsPerPage, searchClient, searchIndices } from 'ts/utils/algolia_constants';
import { hitsPerPage, searchClient, getNameToSearchIndex } from 'ts/utils/algolia_constants';
import { environments } from 'ts/utils/environments';
interface ISearchInputProps {
isHome?: boolean;
}
export const SearchInput: React.FC<ISearchInputProps> = ({ isHome }) => (
<InstantSearch searchClient={searchClient} indexName={searchIndices.tools}>
<AutoComplete isHome={isHome} />
<Configure hitsPerPage={hitsPerPage.autocomplete} />
{/* We could map these when we decide to show api explorer in search results */}
<Index indexName={searchIndices.tools} />
<Index indexName={searchIndices.guides} />
<Index indexName={searchIndices['core-concepts']} />
{/*
TODO: Add this back in when api-explorer page is ready
to be indexed and included in the search results (ditto in autocomplete.tsx)
<Index indexName={searchIndices['api-explorer']} />
*/}
</InstantSearch>
);
export const SearchInput: React.FC<ISearchInputProps> = ({ isHome }) => {
const nameToSearchIndex = getNameToSearchIndex(environments.getEnvironment());
return (
<InstantSearch searchClient={searchClient} indexName={nameToSearchIndex.tools}>
<AutoComplete isHome={isHome} />
<Configure hitsPerPage={hitsPerPage.autocomplete} />
{/* We could map these when we decide to show api explorer in search results */}
<Index indexName={nameToSearchIndex.tools} />
<Index indexName={nameToSearchIndex.guides} />
<Index indexName={nameToSearchIndex.coreConcepts} />
{/*
TODO: Add this back in when apiExplorer page is ready
to be indexed and included in the search results (ditto in autocomplete.tsx)
<Index indexName={nameToSearchIndex.apiExplorer} />
*/}
</InstantSearch>
);
};

View File

@@ -10,16 +10,18 @@ import { Filters } from 'ts/components/docs/sidebar/filters';
import { IHit } from 'ts/components/docs/search/autocomplete';
import { hitsPerPage, searchClient, searchIndices } from 'ts/utils/algolia_constants';
import { getNameToSearchIndex, hitsPerPage, searchClient } from 'ts/utils/algolia_constants';
import { environments } from 'ts/utils/environments';
interface IHitsProps {
hits: IHit[];
}
export const DocsGuides: React.FC = () => {
const nameToSearchIndex = getNameToSearchIndex(environments.getEnvironment());
return (
<DocsPageLayout title="Guides">
<InstantSearch searchClient={searchClient} indexName={searchIndices.guides}>
<InstantSearch searchClient={searchClient} indexName={nameToSearchIndex.guides}>
<Configure hitsPerPage={hitsPerPage.pages} />
<Columns>
<Filters filters={filters} />

View File

@@ -16,16 +16,18 @@ import { Separator } from 'ts/components/docs/shared/separator';
import { IHit } from 'ts/components/docs/search/autocomplete';
import { hitsPerPage, searchClient, searchIndices } from 'ts/utils/algolia_constants';
import { getNameToSearchIndex, hitsPerPage, searchClient } from 'ts/utils/algolia_constants';
import { environments } from 'ts/utils/environments';
interface IHitsProps {
hits: IHit[];
}
export const DocsTools: React.FC = () => {
const nameToSearchIndex = getNameToSearchIndex(environments.getEnvironment());
return (
<DocsPageLayout title="Tools">
<InstantSearch searchClient={searchClient} indexName={searchIndices.tools}>
<InstantSearch searchClient={searchClient} indexName={nameToSearchIndex.tools}>
<Configure hitsPerPage={hitsPerPage.pages} />
<Columns>
<Filters filters={filters} />

View File

@@ -12,6 +12,7 @@ import { Dispatcher } from 'ts/redux/dispatcher';
import { DocPackages, ScreenWidths } from 'ts/types';
import { constants } from 'ts/utils/constants';
import { docUtils } from 'ts/utils/doc_utils';
import { environments } from 'ts/utils/environments';
import { Translate } from 'ts/utils/translate';
import { utils } from 'ts/utils/utils';
@@ -20,7 +21,7 @@ import { SupportedDocJson } from '../../types';
import { DocsInfo } from '../../utils/docs_info';
import { TypeDocUtils } from '../../utils/typedoc_utils';
const isDevelopmentOrStaging = utils.isDevelopment() || utils.isStaging();
const isDevelopmentOrStaging = environments.isDevelopment() || environments.isStaging();
const ZERO_EX_JS_VERSION_MISSING_TOPLEVEL_PATH = '0.32.4';
const docIdToSubpackageName: { [id: string]: string } = {

View File

@@ -20,7 +20,7 @@ import { colors } from 'ts/style/colors';
import { TallyInterface } from 'ts/types';
import { configs } from 'ts/utils/configs';
import { documentConstants } from 'ts/utils/document_meta_constants';
import { utils } from 'ts/utils/utils';
import { environments } from 'ts/utils/environments';
interface LabelInterface {
[key: number]: string;
@@ -199,7 +199,7 @@ export class Governance extends React.Component<RouteComponentProps<any>> {
};
private async _fetchVoteStatusAsync(): Promise<void> {
try {
const voteDomain = utils.isProduction()
const voteDomain = environments.isProduction()
? `https://${configs.DOMAIN_VOTE}`
: `https://${configs.DOMAIN_VOTE}/staging`;
const voteEndpoint = `${voteDomain}/v1/tally/${this._proposalData.zeipId}`;

View File

@@ -20,7 +20,7 @@ import { colors } from 'ts/style/colors';
import { InjectedProvider } from 'ts/types';
import { configs } from 'ts/utils/configs';
import { constants } from 'ts/utils/constants';
import { utils } from 'ts/utils/utils';
import { environments } from 'ts/utils/environments';
export enum VoteValue {
Yes = 'Yes',
@@ -232,7 +232,7 @@ export class VoteForm extends React.Component<Props> {
isAwaitingLedgerSignature: false,
}));
const voteDomain = utils.isProduction()
const voteDomain = environments.isProduction()
? `https://${configs.DOMAIN_VOTE}`
: `https://${configs.DOMAIN_VOTE}/staging`;
const voteEndpoint = `${voteDomain}/v1/vote`;

View File

@@ -14,7 +14,7 @@ import { TallyInterface } from 'ts/types';
import { configs } from 'ts/utils/configs';
import { constants } from 'ts/utils/constants';
import { documentConstants } from 'ts/utils/document_meta_constants';
import { utils } from 'ts/utils/utils';
import { environments } from 'ts/utils/environments';
const ZEIP_IDS = Object.keys(proposals).map(idString => parseInt(idString, 10));
const ZEIP_PROPOSALS: Proposal[] = ZEIP_IDS.map(id => proposals[id]).sort(
@@ -79,7 +79,7 @@ export class VoteIndex extends React.Component<VoteIndexProps, VoteIndexState> {
}
private async _fetchVoteStatusAsync(zeipId: number): Promise<TallyInterface> {
try {
const voteDomain = utils.isProduction()
const voteDomain = environments.isProduction()
? `https://${configs.DOMAIN_VOTE}`
: `https://${configs.DOMAIN_VOTE}/staging`;
const voteEndpoint = `${voteDomain}/v1/tally/${zeipId}`;

View File

@@ -2,6 +2,8 @@ const algoliasearch = require('algoliasearch/lite');
import { ObjectMap } from '@0x/types';
import { environments } from './environments';
const ALGOLIA_MAX_NUMBER_HITS = 1000; // Limit set by algolia
export const ALGOLIA_APP_ID = 'HWXKQZ6EUX';
@@ -13,9 +15,13 @@ export const hitsPerPage = {
pages: ALGOLIA_MAX_NUMBER_HITS, // Maximum set by algolia
};
export const searchIndices: ObjectMap<string> = {
'api-explorer': '0x_api_explorer',
'core-concepts': '0x_core_concepts',
guides: '0x_guides',
tools: '0x_tools',
};
export function getNameToSearchIndex(environment: string): ObjectMap<string> {
const lowercaseEnv = environment.toLowerCase();
const nameToSearchIndex: ObjectMap<string> = {
apiExplorer: `${lowercaseEnv}_api_explorer`,
coreConcepts: `${lowercaseEnv}_core_concepts`,
guides: `${lowercaseEnv}_guides`,
tools: `${lowercaseEnv}_tools`,
};
return nameToSearchIndex;
}
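With this change the index name is derived from the environment rather than hard-coded, so each deploy target reads and writes its own set of Algolia indices. A minimal usage sketch (not part of the commit), assuming the function behaves exactly as defined above:
```
import { getNameToSearchIndex } from 'ts/utils/algolia_constants';

const staging = getNameToSearchIndex('staging');
// staging.tools        === 'staging_tools'
// staging.guides       === 'staging_guides'
// staging.coreConcepts === 'staging_core_concepts'
// staging.apiExplorer  === 'staging_api_explorer'

const production = getNameToSearchIndex('PRODUCTION'); // input is lowercased
// production.tools === 'production_tools'
```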

View File

@@ -1,12 +1,12 @@
{
"api-explorer": {
"api-explorer": {
"apiExplorer": {
"apiExplorer": {
"title": "API Explorer",
"path": "api-explorer/index.mdx"
}
},
"core-concepts": {
"core-concepts": {
"coreConcepts": {
"coreConcepts": {
"title": "Core Concepts",
"subtitle": "Learn all the core concepts you'll need to build effectively on 0x",
"path": "core-concepts/index.mdx"
@@ -516,4 +516,4 @@
"externalUrl": "https://github.com/0xProject/0x-coordinator-server"
}
}
}
}

View File

@@ -1,4 +1,4 @@
import { GoogleSheetLeadUrls, OutdatedWrappedEtherByNetworkId, PublicNodeUrlsByNetworkId } from 'ts/types';
import { GoogleSheetLeadUrls, OutdatedWrappedEtherByNetworkId, PublicNodeUrlsByNetworkId } from '../types';
const BASE_URL = window.location.origin;
const INFURA_API_KEY = 'T5WSC8cautR4KXyYgsRs';
@@ -16,10 +16,6 @@ export const configs = {
DEFAULT_DERIVATION_PATH: `44'/60'/0'`,
// WARNING: ZRX & WETH MUST always be default trackedTokens
DEFAULT_TRACKED_TOKEN_SYMBOLS: ['WETH', 'ZRX'],
DOMAIN_STAGING: 'staging-0xproject.s3-website-us-east-1.amazonaws.com',
DOMAIN_DOGFOOD: 'dogfood.0xproject.com',
DOMAINS_DEVELOPMENT: ['0xproject.localhost:3572', 'localhost:3572', '127.0.0.1', '0.0.0.0:3572'],
DOMAIN_PRODUCTION: '0x.org',
DOMAIN_VOTE: 'vote.0x.org',
VOTE_INSTANT_ORDER_SOURCE: 'https://api.radarrelay.com/0x/v2/',
VOTE_INSTANT_ASSET_DATAS: ['0xf47261b0000000000000000000000000e41d2489571d322189246dafa5ebde1f4699f498'],

View File

@@ -0,0 +1,6 @@
export const domains = {
DOMAIN_STAGING: 'staging-0xproject.s3-website-us-east-1.amazonaws.com',
DOMAIN_DOGFOOD: 'dogfood.0xproject.com',
DOMAINS_DEVELOPMENT: ['0xproject.localhost:3572', 'localhost:3572', '127.0.0.1', '0.0.0.0:3572'],
DOMAIN_PRODUCTION: '0x.org',
};

View File

@@ -0,0 +1,35 @@
import * as _ from 'lodash';
import { Environments } from '../types';
import { domains } from './domains';
export const environments = {
isDevelopment(): boolean {
return _.includes(domains.DOMAINS_DEVELOPMENT, window.location.host);
},
isStaging(): boolean {
return _.includes(window.location.href, domains.DOMAIN_STAGING);
},
isDogfood(): boolean {
return _.includes(window.location.href, domains.DOMAIN_DOGFOOD);
},
isProduction(): boolean {
return _.includes(window.location.href, domains.DOMAIN_PRODUCTION);
},
getEnvironment(): Environments {
if (environments.isDogfood()) {
return Environments.Dogfood;
}
if (environments.isDevelopment()) {
return Environments.Development;
}
if (environments.isStaging()) {
return Environments.Staging;
}
if (environments.isProduction()) {
return Environments.Production;
}
return Environments.Unknown;
},
};
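On the client, the environment detected from `window.location` is what selects the index set, as the search components touched by this commit do. A condensed sketch of that wiring, assuming the `Environments` enum values lowercase to the same environment names the deploy scripts pass to `index_docs` (`dogfood`, `staging`, `production`):
```
import { getNameToSearchIndex } from 'ts/utils/algolia_constants';
import { environments } from 'ts/utils/environments';

// Pick the indices that match wherever the site is currently being served from,
// so dogfood/staging builds never query (or depend on) the production indices.
const nameToSearchIndex = getNameToSearchIndex(environments.getEnvironment());
// e.g. on staging-0xproject: nameToSearchIndex.guides === 'staging_guides'
// e.g. on 0x.org:            nameToSearchIndex.guides === 'production_guides'
```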

View File

@@ -1,8 +1,8 @@
import { logUtils } from '@0x/utils';
import Rollbar from 'rollbar';
import { configs } from 'ts/utils/configs';
import { constants } from 'ts/utils/constants';
import { utils } from 'ts/utils/utils';
import { domains } from 'ts/utils/domains';
import { environments } from 'ts/utils/environments';
// Suggested way to include Rollbar with Webpack
// https://github.com/rollbar/rollbar.js/tree/master/examples/webpack
@@ -13,7 +13,7 @@ const rollbarConfig = {
itemsPerMinute: 10,
maxItems: 500,
payload: {
environment: utils.getEnvironment(),
environment: environments.getEnvironment(),
client: {
javascript: {
source_map_enabled: true,
@@ -24,7 +24,7 @@ const rollbarConfig = {
},
},
uncaughtErrorLevel: 'error',
hostWhiteList: [configs.DOMAIN_PRODUCTION, configs.DOMAIN_STAGING],
hostWhiteList: [domains.DOMAIN_PRODUCTION, domains.DOMAIN_STAGING],
ignoredMessages: [
// Errors from the third-party scripts
'Script error',
@@ -42,7 +42,7 @@ const rollbar = new Rollbar(rollbarConfig);
export const errorReporter = {
report(err: Error): void {
if (utils.isDevelopment()) {
if (environments.isDevelopment()) {
return; // Let's not log development errors to rollbar
}
rollbar.error(err, (rollbarErr: Error) => {

View File

@@ -17,7 +17,6 @@ import {
AccountState,
BlockchainCallErrs,
BrowserType,
Environments,
EtherscanLinkSuffixes,
Networks,
OperatingSystemType,
@@ -33,6 +32,7 @@ import {
} from 'ts/types';
import { configs } from 'ts/utils/configs';
import { constants } from 'ts/utils/constants';
import { environments } from 'ts/utils/environments';
import * as u2f from 'ts/vendor/u2f_api';
export const utils = {
@@ -330,43 +330,16 @@ export const utils = {
return parsedProviderName;
},
getBackendBaseUrl(): string {
if (utils.isDogfood()) {
if (environments.isDogfood()) {
return configs.BACKEND_BASE_STAGING_URL;
} else if (utils.isDevelopment()) {
} else if (environments.isDevelopment()) {
return configs.BACKEND_BASE_DEV_URL;
}
return configs.BACKEND_BASE_PROD_URL;
},
isDevelopment(): boolean {
return _.includes(configs.DOMAINS_DEVELOPMENT, window.location.host);
},
isStaging(): boolean {
return _.includes(window.location.href, configs.DOMAIN_STAGING);
},
isExternallyInjected(providerType: ProviderType, injectedProviderName: string): boolean {
return providerType === ProviderType.Injected && injectedProviderName !== constants.PROVIDER_NAME_PUBLIC;
},
isDogfood(): boolean {
return _.includes(window.location.href, configs.DOMAIN_DOGFOOD);
},
isProduction(): boolean {
return _.includes(window.location.href, configs.DOMAIN_PRODUCTION);
},
getEnvironment(): Environments {
if (utils.isDogfood()) {
return Environments.Dogfood;
}
if (utils.isDevelopment()) {
return Environments.Development;
}
if (utils.isStaging()) {
return Environments.Staging;
}
if (utils.isProduction()) {
return Environments.Production;
}
return Environments.Unknown;
},
getEthToken(tokenByAddress: TokenByAddress): Token {
return utils.getTokenBySymbol(constants.ETHER_TOKEN_SYMBOL, tokenByAddress);
},