From 6b094f9caf237af142d3bd02b5a958b8cdee08f3 Mon Sep 17 00:00:00 2001 From: Prithpal Sooriya Date: Wed, 7 Jan 2026 14:30:23 +0000 Subject: [PATCH 1/5] Enhance token service with RWA support and pagination improvements - Introduced support for Real World Assets (RWA) in token fetching and metadata retrieval. - Updated `fetchTokenListByChainId` to handle pagination, limiting to 10 pages to optimize performance. - Refactored token metadata fetching to include RWA data and improved error handling. - Added mock data for testing RWA tokens and updated related tests to ensure coverage. This update enhances the token service's capabilities and improves the overall efficiency of token data retrieval. --- .../src/TokenListController.ts | 34 +- .../TokenSearchDiscoveryDataController.ts | 14 +- .../src/TokensController.test.ts | 16 +- .../src/TokensController.ts | 2 +- .../src/__fixtures__/tokens-api-mocks.ts | 235 ++++++++ .../src/token-service.test.ts | 542 ++++++++---------- .../assets-controllers/src/token-service.ts | 224 ++++++-- 7 files changed, 703 insertions(+), 364 deletions(-) create mode 100644 packages/assets-controllers/src/__fixtures__/tokens-api-mocks.ts diff --git a/packages/assets-controllers/src/TokenListController.ts b/packages/assets-controllers/src/TokenListController.ts index 6ee57c476bb..b344f4a35ed 100644 --- a/packages/assets-controllers/src/TokenListController.ts +++ b/packages/assets-controllers/src/TokenListController.ts @@ -19,7 +19,7 @@ import { formatAggregatorNames, formatIconUrlWithProxy, } from './assetsUtil'; -import { fetchTokenListByChainId } from './token-service'; +import { TokenRwaData, fetchTokenListByChainId } from './token-service'; const DEFAULT_INTERVAL = 24 * 60 * 60 * 1000; const DEFAULT_THRESHOLD = 24 * 60 * 60 * 1000; @@ -34,6 +34,7 @@ export type TokenListToken = { occurrences: number; aggregators: string[]; iconUrl: string; + rwaData?: TokenRwaData; }; export type TokenListMap = Record; @@ -305,26 +306,29 @@ export class TokenListController extends StaticIntervalPollingController - fetchTokenListByChainId( - chainId, - this.abortController.signal, - ) as Promise, + const tokensFromAPI = await safelyExecute(() => + fetchTokenListByChainId(chainId, this.abortController.signal), ); // Have response - process and update list - if (tokensFromAPI) { + if (tokensFromAPI && tokensFromAPI.length > 0) { // Format tokens from API (HTTP) and update tokenList const tokenList: TokenListMap = {}; for (const token of tokensFromAPI) { tokenList[token.address] = { - ...token, - aggregators: formatAggregatorNames(token.aggregators), - iconUrl: formatIconUrlWithProxy({ - chainId, - tokenAddress: token.address, - }), + address: token.address, + symbol: token.symbol, + decimals: token.decimals, + name: token.name, + occurrences: token.occurrences, + aggregators: formatAggregatorNames(token.aggregators ?? []), + iconUrl: + token.iconUrl ?? 
+ formatIconUrlWithProxy({ + chainId, + tokenAddress: token.address, + }), + rwaData: token.rwaData, }; } @@ -338,7 +342,7 @@ export class TokenListController extends StaticIntervalPollingController { const newDataCache: DataCache = { data: {}, timestamp: Date.now() }; state.tokensChainsCache[chainId] ??= newDataCache; diff --git a/packages/assets-controllers/src/TokenSearchDiscoveryDataController/TokenSearchDiscoveryDataController.ts b/packages/assets-controllers/src/TokenSearchDiscoveryDataController/TokenSearchDiscoveryDataController.ts index b62ac16f47a..c76c4c8292d 100644 --- a/packages/assets-controllers/src/TokenSearchDiscoveryDataController/TokenSearchDiscoveryDataController.ts +++ b/packages/assets-controllers/src/TokenSearchDiscoveryDataController/TokenSearchDiscoveryDataController.ts @@ -232,11 +232,23 @@ export class TokenSearchDiscoveryDataController extends BaseController< let tokenMetadata: TokenListToken | undefined; try { - tokenMetadata = await fetchTokenMetadata( + const tokenMetadataResult = await fetchTokenMetadata( chainId, address, this.#abortController.signal, ); + if (tokenMetadataResult) { + tokenMetadata = { + name: tokenMetadataResult.name, + symbol: tokenMetadataResult.symbol, + decimals: tokenMetadataResult.decimals, + address: tokenMetadataResult.address, + aggregators: tokenMetadataResult.aggregators, + iconUrl: tokenMetadataResult.iconUrl, + rwaData: tokenMetadataResult.rwaData, + occurrences: tokenMetadataResult.occurrences, + }; + } } catch (error) { if ( !(error instanceof Error) || diff --git a/packages/assets-controllers/src/TokensController.test.ts b/packages/assets-controllers/src/TokensController.test.ts index 5b94989f1a1..d21815f727d 100644 --- a/packages/assets-controllers/src/TokensController.test.ts +++ b/packages/assets-controllers/src/TokensController.test.ts @@ -27,13 +27,12 @@ import type { } from '@metamask/network-controller'; import { getDefaultNetworkControllerState } from '@metamask/network-controller'; import type { Patch } from 'immer'; -import nock from 'nock'; import * as sinon from 'sinon'; import { v1 as uuidV1 } from 'uuid'; import { ERC20Standard } from './Standards/ERC20Standard'; import { ERC1155Standard } from './Standards/NftStandards/ERC1155/ERC1155Standard'; -import { TOKEN_END_POINT_API } from './token-service'; +import * as TokenServiceModule from './token-service'; import type { Token } from './TokenRatesController'; import { TokensController } from './TokensController'; import type { @@ -1603,7 +1602,7 @@ describe('TokensController', () => { }); }); - it('should throw TokenService error if fetchTokenMetadata returns a response with an error', async () => { + it('should throw TokenService error if fetchTokenMetadata throws unknown', async () => { const chainId = ChainId.mainnet; await withController( @@ -1617,14 +1616,9 @@ describe('TokensController', () => { '0x514910771AF9Ca656af840dff83E8264EcF986CA'; const error = 'An error occured'; const fullErrorMessage = `TokenService Error: ${error}`; - nock(TOKEN_END_POINT_API) - .get( - `/token/${convertHexToDecimal( - chainId, - )}?address=${dummyTokenAddress}`, - ) - .reply(200, { error }) - .persist(); + jest + .spyOn(TokenServiceModule, 'fetchTokenMetadata') + .mockRejectedValue(new Error(fullErrorMessage)); await expect( controller.addToken({ diff --git a/packages/assets-controllers/src/TokensController.ts b/packages/assets-controllers/src/TokensController.ts index 58606100bf3..af08b288cc0 100644 --- a/packages/assets-controllers/src/TokensController.ts +++ 
b/packages/assets-controllers/src/TokensController.ts @@ -373,7 +373,7 @@ export class TokensController extends BaseController< chainId: Hex, ): Promise { try { - const token = await fetchTokenMetadata( + const token = await fetchTokenMetadata( chainId, tokenAddress, this.#abortController.signal, diff --git a/packages/assets-controllers/src/__fixtures__/tokens-api-mocks.ts b/packages/assets-controllers/src/__fixtures__/tokens-api-mocks.ts new file mode 100644 index 00000000000..7cc22448ba6 --- /dev/null +++ b/packages/assets-controllers/src/__fixtures__/tokens-api-mocks.ts @@ -0,0 +1,235 @@ +export const MOCK_ETHEREUM_TOKENS_METADATA = { + data: [ + { + address: '0xbbbbca6a901c926f240b89eacb641d8aec7aeafd', + symbol: 'LRC', + decimals: 18, + occurrences: 11, + aggregators: [ + 'paraswap', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + }, + { + address: '0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f', + symbol: 'SNX', + decimals: 18, + occurrences: 11, + aggregators: [ + 'paraswap', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + name: 'Synthetix', + }, + { + address: '0x408e41876cccdc0f92210600ef50372656052a38', + symbol: 'REN', + decimals: 18, + occurrences: 11, + aggregators: [ + 'paraswap', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + }, + { + address: '0x514910771af9ca656af840dff83e8264ecf986ca', + symbol: 'LINK', + decimals: 18, + occurrences: 11, + aggregators: [ + 'paraswap', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + name: 'Chainlink', + }, + { + address: '0x1f573d6fb3f13d689ff844b4ce37794d79a7ff1c', + symbol: 'BNT', + decimals: 18, + occurrences: 11, + aggregators: [ + 'paraswap', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + name: 'Bancor', + }, + ], + pageInfo: { + hasNextPage: false, + endCursor: '', + }, +}; + +export const MOCK_LINEA_TOKENS_METADATA = { + data: [ + { + address: '0xbbbbca6a901c926f240b89eacb641d8aec7aeafd', + symbol: 'LRC', + decimals: 18, + occurrences: 11, + aggregators: [ + 'lineaTeam', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + }, + { + address: '0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f', + symbol: 'SNX', + decimals: 18, + occurrences: 11, + aggregators: [ + 'lineaTeam', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + name: 'Synthetix', + }, + { + address: '0x408e41876cccdc0f92210600ef50372656052a38', + symbol: 'REN', + decimals: 18, + occurrences: 11, + aggregators: [ + 'lineaTeam', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + }, + { + address: '0x514910771af9ca656af840dff83e8264ecf986ca', + symbol: 'LINK', + decimals: 18, + occurrences: 11, + aggregators: [ + 'lineaTeam', + 'pmm', + 'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + name: 'Chainlink', + }, + { + address: '0x1f573d6fb3f13d689ff844b4ce37794d79a7ff1c', + symbol: 'BNT', + decimals: 18, + occurrences: 11, + aggregators: [ + 'paraswap', + 'pmm', + 
'airswapLight', + 'zeroEx', + 'bancor', + 'coinGecko', + 'zapper', + 'kleros', + 'zerion', + 'cmc', + 'oneInch', + ], + name: 'Bancor', + }, + ], + pageInfo: { + hasNextPage: false, + endCursor: '', + }, +}; + +export const MOCK_SINGLE_TOKEN_METADATA = [ + { + aggregators: ['coinGecko', 'liFi', 'rango', 'ondo'], + assetId: 'eip155:1/erc20:0xf6b1117ec07684d3958cad8beb1b302bfd21103f', + decimals: 18, + name: 'Tesla (Ondo Tokenized)', + rwaData: { + market: { + nextOpen: '2026-01-07T14:31:00.000Z', + nextClose: '2026-01-07T14:29:00.000Z', + }, + nextPause: { + start: '2026-01-28T21:00:00.000Z', + end: '2026-01-29T00:30:00.000Z', + }, + ticker: 'TSLA', + instrumentType: 'stock', + }, + symbol: 'TSLAON', + }, +]; diff --git a/packages/assets-controllers/src/token-service.test.ts b/packages/assets-controllers/src/token-service.test.ts index 1d06f437e4a..946d47a76eb 100644 --- a/packages/assets-controllers/src/token-service.test.ts +++ b/packages/assets-controllers/src/token-service.test.ts @@ -1,8 +1,14 @@ import { toHex } from '@metamask/controller-utils'; import type { CaipChainId } from '@metamask/utils'; +import { clone } from 'lodash'; import nock from 'nock'; -import type { SortTrendingBy } from './token-service'; +import { + MOCK_ETHEREUM_TOKENS_METADATA, + MOCK_LINEA_TOKENS_METADATA, + MOCK_SINGLE_TOKEN_METADATA, +} from './__fixtures__/tokens-api-mocks'; +import type { EVMTokenMetadata, SortTrendingBy } from './token-service'; import { fetchTokenListByChainId, fetchTokenMetadata, @@ -10,234 +16,12 @@ import { searchTokens, TOKEN_END_POINT_API, TOKEN_METADATA_NO_SUPPORT_ERROR, + TOKENS_END_POINT_API, } from './token-service'; const ONE_MILLISECOND = 1; const ONE_SECOND_IN_MILLISECONDS = 1_000; -const sampleTokenList = [ - { - address: '0xbbbbca6a901c926f240b89eacb641d8aec7aeafd', - symbol: 'LRC', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - }, - { - address: '0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f', - symbol: 'SNX', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Synthetix', - }, - { - address: '0x408e41876cccdc0f92210600ef50372656052a38', - symbol: 'REN', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - }, - { - address: '0x514910771af9ca656af840dff83e8264ecf986ca', - symbol: 'LINK', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Chainlink', - }, - { - address: '0x1f573d6fb3f13d689ff844b4ce37794d79a7ff1c', - symbol: 'BNT', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Bancor', - }, -]; - -const sampleTokenListLinea = [ - { - address: '0xbbbbca6a901c926f240b89eacb641d8aec7aeafd', - symbol: 'LRC', - decimals: 18, - occurrences: 11, - aggregators: [ - 'lineaTeam', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - }, - { - address: 
'0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f', - symbol: 'SNX', - decimals: 18, - occurrences: 11, - aggregators: [ - 'lineaTeam', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Synthetix', - }, - { - address: '0x408e41876cccdc0f92210600ef50372656052a38', - symbol: 'REN', - decimals: 18, - occurrences: 11, - aggregators: [ - 'lineaTeam', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - }, - { - address: '0x514910771af9ca656af840dff83e8264ecf986ca', - symbol: 'LINK', - decimals: 18, - occurrences: 11, - aggregators: [ - 'lineaTeam', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Chainlink', - }, - { - address: '0x1f573d6fb3f13d689ff844b4ce37794d79a7ff1c', - symbol: 'BNT', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Bancor', - }, -]; - -const sampleToken = { - address: '0x514910771af9ca656af840dff83e8264ecf986ca', - symbol: 'LINK', - decimals: 18, - occurrences: 11, - aggregators: [ - 'paraswap', - 'pmm', - 'airswapLight', - 'zeroEx', - 'bancor', - 'coinGecko', - 'zapper', - 'kleros', - 'zerion', - 'cmc', - 'oneInch', - ], - name: 'Chainlink', -}; - const sampleSearchResults = [ { address: '0xa0b86a33e6c166428cf041c73490a6b448b7f2c2', @@ -289,47 +73,74 @@ const polygonCaipChainId: CaipChainId = 'eip155:137'; describe('Token service', () => { describe('fetchTokenListByChainId', () => { + const createNockEndpoint = ( + chainId: number, + opts?: { + nockIntercept?: ( + intercept: nock.Interceptor, + ) => nock.Interceptor | nock.Scope; + queryParams?: Record; + response?: unknown; + }, + ): nock.Scope => { + const nockPartial = nock(TOKENS_END_POINT_API) + .get(`/tokens/${chainId}`) + .query({ + occurrenceFloor: '3', + includeTokenFees: 'false', + includeAssetType: 'false', + includeERC20Permit: 'false', + includeStorage: 'false', + includeAggregators: 'true', + includeOccurrences: 'true', + includeIconUrl: 'true', + includeRwaData: 'true', + first: '3000', + ...opts?.queryParams, + }); + + const finalNock = opts?.nockIntercept?.(nockPartial) ?? nockPartial; + + return 'isDone' in finalNock + ? finalNock + : finalNock + .reply(200, opts?.response ?? 
MOCK_ETHEREUM_TOKENS_METADATA) + .persist(); + }; + it('should call the tokens api and return the list of tokens', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get( - `/tokens/${sampleDecimalChainId}?occurrenceFloor=3&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`, - ) - .reply(200, sampleTokenList) - .persist(); + const endpoint = createNockEndpoint(sampleDecimalChainId); const tokens = await fetchTokenListByChainId(sampleChainId, signal); - expect(tokens).toStrictEqual(sampleTokenList); + expect(endpoint.isDone()).toBe(true); + expect(tokens).toStrictEqual(MOCK_ETHEREUM_TOKENS_METADATA.data); }); it('should call the tokens api and return the list of tokens on linea mainnet', async () => { const { signal } = new AbortController(); const lineaChainId = 59144; const lineaHexChain = toHex(lineaChainId); - - nock(TOKEN_END_POINT_API) - .get( - `/tokens/${lineaChainId}?occurrenceFloor=1&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`, - ) - .reply(200, sampleTokenListLinea) - .persist(); + const endpoint = createNockEndpoint(lineaChainId, { + response: MOCK_LINEA_TOKENS_METADATA, + queryParams: { + occurrenceFloor: '1', + }, + }); const tokens = await fetchTokenListByChainId(lineaHexChain, signal); - expect(tokens).toStrictEqual(sampleTokenListLinea); + expect(endpoint.isDone()).toBe(true); + expect(tokens).toStrictEqual(MOCK_LINEA_TOKENS_METADATA.data); }); it('should return undefined if the fetch is aborted', async () => { const abortController = new AbortController(); - nock(TOKEN_END_POINT_API) - .get( - `/tokens/${sampleDecimalChainId}?occurrenceFloor=3&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`, - ) - // well beyond time it will take to abort - .delay(ONE_SECOND_IN_MILLISECONDS) - .reply(200, sampleTokenList) - .persist(); + const endpoint = createNockEndpoint(sampleDecimalChainId, { + nockIntercept: (intercept) => + intercept.delay(ONE_SECOND_IN_MILLISECONDS), + }); const fetchPromise = fetchTokenListByChainId( sampleChainId, @@ -337,65 +148,156 @@ describe('Token service', () => { ); abortController.abort(); - expect(await fetchPromise).toBeUndefined(); + const result = await fetchPromise; + expect(result).toStrictEqual([]); + expect(endpoint.isDone()).toBe(false); }); it('should return undefined if the fetch fails with a network error', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get( - `/tokens/${sampleDecimalChainId}?occurrenceFloor=3&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`, - ) - .replyWithError('Example network error') - .persist(); + + const endpoint = createNockEndpoint(sampleDecimalChainId, { + nockIntercept: (intercept) => + intercept.replyWithError('Example network error'), + }); const result = await fetchTokenListByChainId(sampleChainId, signal); - expect(result).toBeUndefined(); + expect(endpoint.isDone()).toBe(true); + expect(result).toStrictEqual([]); }); it('should return undefined if the fetch fails with an unsuccessful status code', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get( - `/tokens/${sampleDecimalChainId}?occurrenceFloor=3&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`, 
- ) - .reply(500) - .persist(); + const endpoint = createNockEndpoint(sampleDecimalChainId, { + nockIntercept: (intercept) => intercept.reply(500), + }); const result = await fetchTokenListByChainId(sampleChainId, signal); - expect(result).toBeUndefined(); + expect(endpoint.isDone()).toBe(true); + expect(result).toStrictEqual([]); }); it('should return undefined if the fetch fails with a timeout', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get( - `/tokens/${sampleDecimalChainId}?occurrenceFloor=3&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`, - ) - // well beyond timeout - .delay(ONE_SECOND_IN_MILLISECONDS) - .reply(200, sampleTokenList) - .persist(); + const endpoint = createNockEndpoint(sampleDecimalChainId, { + nockIntercept: (intercept) => + intercept.delay(ONE_SECOND_IN_MILLISECONDS), + }); const result = await fetchTokenListByChainId(sampleChainId, signal, { timeout: ONE_MILLISECOND, }); - expect(result).toBeUndefined(); + expect(endpoint.isDone()).toBe(true); + expect(result).toStrictEqual([]); + }); + + it('should paginate through tokens until reaches end', async () => { + const response1 = clone(MOCK_ETHEREUM_TOKENS_METADATA); + response1.pageInfo.hasNextPage = true; + response1.pageInfo.endCursor = 'Mjk5OQ=='; + const endpoint1 = createNockEndpoint(sampleDecimalChainId, { + response: response1, + }); + + const response2 = clone(MOCK_ETHEREUM_TOKENS_METADATA); + response2.pageInfo.hasNextPage = false; + response2.pageInfo.endCursor = ''; + const endpoint2 = createNockEndpoint(sampleDecimalChainId, { + queryParams: { + after: 'Mjk5OQ==', + }, + response: response2, + }); + + const { signal } = new AbortController(); + const result = await fetchTokenListByChainId(sampleChainId, signal); + + expect(endpoint1.isDone()).toBe(true); + expect(endpoint2.isDone()).toBe(true); + expect(result).toHaveLength( + response1.data.length + response2.data.length, + ); + }); + + it('should force stop pagination after 10 pages', async () => { + // 20 pages + const nockEndpoints = Array.from({ length: 20 }, (_, index) => { + const mockResponse = clone(MOCK_ETHEREUM_TOKENS_METADATA); + mockResponse.pageInfo.hasNextPage = true; + mockResponse.pageInfo.endCursor = `Mjk5OQ==${index}`; + return createNockEndpoint(sampleDecimalChainId, { + queryParams: + index === 0 + ? 
undefined + : { + after: `Mjk5OQ==${index - 1}`, + }, + response: clone(MOCK_ETHEREUM_TOKENS_METADATA), + }); + }); + + const { signal } = new AbortController(); + const result = await fetchTokenListByChainId(sampleChainId, signal); + + // Assert first and last endpoint calls + expect(nockEndpoints[0].isDone()).toBe(true); // page 1 is called + expect(nockEndpoints[19].isDone()).toBe(false); // page 20 is never called + + // Assert all endpoints calls + nockEndpoints.forEach((endpoint, index) => { + const isDone = index < 10; + expect(endpoint.isDone()).toBe(isDone); + }); + + // Assert result length (first 10 pages) + expect(result).toHaveLength( + 10 * MOCK_ETHEREUM_TOKENS_METADATA.data.length, + ); }); }); describe('fetchTokenMetadata', () => { + const createNockEndpoint = ( + chainId: number, + tokenAddress: string, + opts?: { + nockIntercept?: ( + intercept: nock.Interceptor, + ) => nock.Interceptor | nock.Scope; + queryParams?: Record; + response?: unknown; + }, + ): nock.Scope => { + const nockPartial = nock(TOKENS_END_POINT_API) + .get(`/v3/assets`) + .query({ + assetIds: `eip155:${chainId}/erc20:${tokenAddress}`, + includeAggregators: 'true', + includeOccurrences: 'true', + includeIconUrl: 'true', + includeMetadata: 'true', + includeRwaData: 'true', + ...opts?.queryParams, + }); + + const finalNock = opts?.nockIntercept?.(nockPartial) ?? nockPartial; + + return 'isDone' in finalNock + ? finalNock + : finalNock + .reply(200, opts?.response ?? MOCK_SINGLE_TOKEN_METADATA) + .persist(); + }; + it('should call the api to return the token metadata for eth address provided', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get( - `/token/${sampleDecimalChainId}?address=0x514910771af9ca656af840dff83e8264ecf986ca`, - ) - .reply(200, sampleToken) - .persist(); + const endpoint = createNockEndpoint( + sampleDecimalChainId, + '0x514910771af9ca656af840dff83e8264ecf986ca', + ); const token = await fetchTokenMetadata( sampleChainId, @@ -403,17 +305,42 @@ describe('Token service', () => { signal, ); - expect(token).toStrictEqual(sampleToken); + const expectedOutput: EVMTokenMetadata = { + address: '0x514910771af9ca656af840dff83e8264ecf986ca', + name: 'Tesla (Ondo Tokenized)', + symbol: 'TSLAON', + decimals: 18, + aggregators: ['coinGecko', 'liFi', 'rango', 'ondo'], + occurrences: 4, + iconUrl: expect.any(String), + rwaData: { + market: { + nextOpen: expect.any(String), + nextClose: expect.any(String), + }, + nextPause: { + start: expect.any(String), + end: expect.any(String), + }, + ticker: 'TSLA', + instrumentType: 'stock', + }, + }; + + expect(endpoint.isDone()).toBe(true); + expect(token).toStrictEqual(expectedOutput); }); it('should return undefined if the fetch is aborted', async () => { const abortController = new AbortController(); - nock(TOKEN_END_POINT_API) - .get(`/tokens/${sampleDecimalChainId}`) - // well beyond time it will take to abort - .delay(ONE_SECOND_IN_MILLISECONDS) - .reply(200, sampleTokenList) - .persist(); + const endpoint = createNockEndpoint( + sampleDecimalChainId, + '0x514910771af9ca656af840dff83e8264ecf986ca', + { + nockIntercept: (intercept) => + intercept.delay(ONE_SECOND_IN_MILLISECONDS), + }, + ); const fetchPromise = fetchTokenMetadata( sampleChainId, @@ -423,14 +350,19 @@ describe('Token service', () => { abortController.abort(); expect(await fetchPromise).toBeUndefined(); + expect(endpoint.isDone()).toBe(false); }); it('should return undefined if the fetch fails with a network error', async () => { const { signal } = new 
AbortController(); - nock(TOKEN_END_POINT_API) - .get(`/tokens/${sampleDecimalChainId}`) - .replyWithError('Example network error') - .persist(); + const endpoint = createNockEndpoint( + sampleDecimalChainId, + '0x514910771af9ca656af840dff83e8264ecf986ca', + { + nockIntercept: (intercept) => + intercept.replyWithError('Example network error'), + }, + ); const tokenMetadata = await fetchTokenMetadata( sampleChainId, @@ -438,15 +370,19 @@ describe('Token service', () => { signal, ); + expect(endpoint.isDone()).toBe(true); expect(tokenMetadata).toBeUndefined(); }); it('should return undefined if the fetch fails with an unsuccessful status code', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get(`/tokens/${sampleDecimalChainId}`) - .reply(500) - .persist(); + const endpoint = createNockEndpoint( + sampleDecimalChainId, + '0x514910771af9ca656af840dff83e8264ecf986ca', + { + nockIntercept: (intercept) => intercept.reply(500), + }, + ); const tokenMetadata = await fetchTokenMetadata( sampleChainId, @@ -454,17 +390,20 @@ describe('Token service', () => { signal, ); + expect(endpoint.isDone()).toBe(true); expect(tokenMetadata).toBeUndefined(); }); it('should return undefined if the fetch fails with a timeout', async () => { const { signal } = new AbortController(); - nock(TOKEN_END_POINT_API) - .get(`/tokens/${sampleDecimalChainId}`) - // well beyond timeout - .delay(ONE_SECOND_IN_MILLISECONDS) - .reply(200, sampleTokenList) - .persist(); + const endpoint = createNockEndpoint( + sampleDecimalChainId, + '0x514910771af9ca656af840dff83e8264ecf986ca', + { + nockIntercept: (intercept) => + intercept.delay(ONE_SECOND_IN_MILLISECONDS), + }, + ); const tokenMetadata = await fetchTokenMetadata( sampleChainId, @@ -474,10 +413,16 @@ describe('Token service', () => { ); expect(tokenMetadata).toBeUndefined(); + expect(endpoint.isDone()).toBe(true); // called, but response is timed out }); it('should throw error if fetching from non supported network', async () => { const { signal } = new AbortController(); + const endpoint = createNockEndpoint( + 5, + '0x514910771af9ca656af840dff83e8264ecf986ca', + ); + await expect( fetchTokenMetadata( toHex(5), @@ -485,6 +430,7 @@ describe('Token service', () => { signal, ), ).rejects.toThrow(TOKEN_METADATA_NO_SUPPORT_ERROR); + expect(endpoint.isDone()).toBe(false); // endpoint is never called since we capture it as unsupported }); }); diff --git a/packages/assets-controllers/src/token-service.ts b/packages/assets-controllers/src/token-service.ts index 3048b9bbcbb..2e7bac3f8e1 100644 --- a/packages/assets-controllers/src/token-service.ts +++ b/packages/assets-controllers/src/token-service.ts @@ -4,26 +4,105 @@ import { handleFetch, timeoutFetch, } from '@metamask/controller-utils'; -import type { CaipAssetType, CaipChainId, Hex } from '@metamask/utils'; +import { + CaipAssetType, + CaipChainId, + Hex, + KnownCaipNamespace, + toCaipAssetType, + toCaipChainId, + parseCaipChainId, + hexToNumber, +} from '@metamask/utils'; -import { isTokenListSupportedForNetwork } from './assetsUtil'; +import { + formatIconUrlWithProxy, + isTokenListSupportedForNetwork, +} from './assetsUtil'; export const TOKEN_END_POINT_API = 'https://token.api.cx.metamask.io'; +export const TOKENS_END_POINT_API = 'https://tokens.dev-api.cx.metamask.io'; export const TOKEN_METADATA_NO_SUPPORT_ERROR = 'TokenService Error: Network does not support fetchTokenMetadata'; +export type TokenRwaData = { + market?: { + nextOpen?: string; + nextClose?: string; + }; + nextPause?: { 
+ start?: string; + end?: string; + }; + ticker?: string; + instrumentType?: string; +}; + +export type GetTokensUrlResponse = { + data: { + address: string; + symbol: string; + decimals: number; + name: string; + aggregators: string[]; + occurrences: number; + iconUrl?: string; + rwaData?: TokenRwaData; + }[]; + pageInfo: { + hasNextPage: boolean; + endCursor: string; + }; +}; + +export type GetTokenMetadataUrlResponse = { + assetId: CaipAssetType; + symbol: string; + decimals: number; + name: string; + aggregators: string[]; + rwaData?: TokenRwaData; +}[]; + +export type EVMTokenMetadata = { + name: string; + symbol: string; + decimals: number; + address: string; + aggregators: string[]; + occurrences: number; + iconUrl: string; + rwaData?: TokenRwaData; +}; + /** * Get the tokens URL for a specific network. * * @param chainId - The chain ID of the network the tokens requested are on. + * @param nextCursor - The cursor to the next page of tokens. * @returns The tokens URL. */ -function getTokensURL(chainId: Hex): string { +function getTokensURL(chainId: Hex, nextCursor?: string): string { const occurrenceFloor = chainId === ChainId['linea-mainnet'] ? 1 : 3; - return `${TOKEN_END_POINT_API}/tokens/${convertHexToDecimal( + const queryParams = new URLSearchParams(); + queryParams.append('occurrenceFloor', occurrenceFloor.toString()); + queryParams.append('includeTokenFees', 'false'); + queryParams.append('includeAssetType', 'false'); + queryParams.append('includeERC20Permit', 'false'); + queryParams.append('includeStorage', 'false'); + queryParams.append('includeAggregators', 'true'); + queryParams.append('includeOccurrences', 'true'); + queryParams.append('includeIconUrl', 'true'); + queryParams.append('includeRwaData', 'true'); + queryParams.append('first', '3000'); + if (nextCursor) { + queryParams.append('after', nextCursor); + } + + return `${TOKENS_END_POINT_API}/tokens/${convertHexToDecimal( chainId, - )}?occurrenceFloor=${occurrenceFloor}&includeNativeAssets=false&includeTokenFees=false&includeAssetType=false&includeERC20Permit=false&includeStorage=false`; + )}?${queryParams.toString()}`; } /** @@ -34,9 +113,24 @@ function getTokensURL(chainId: Hex): string { * @returns The token metadata URL. 
*/ function getTokenMetadataURL(chainId: Hex, tokenAddress: string): string { - return `${TOKEN_END_POINT_API}/token/${convertHexToDecimal( - chainId, - )}?address=${tokenAddress}`; + const queryParams = new URLSearchParams(); + const caipChainId = parseCaipChainId( + toCaipChainId(KnownCaipNamespace.Eip155, hexToNumber(chainId).toString()), + ); + const assetId = toCaipAssetType( + caipChainId.namespace, + caipChainId.reference, + 'erc20', + tokenAddress, + ); + + queryParams.append('includeAggregators', 'true'); + queryParams.append('includeOccurrences', 'true'); + queryParams.append('includeIconUrl', 'true'); + queryParams.append('includeMetadata', 'true'); + queryParams.append('includeRwaData', 'true'); + + return `${TOKENS_END_POINT_API}/v3/assets?assetIds=${assetId}&${queryParams.toString()}`; } /** @@ -148,35 +242,63 @@ export async function fetchTokenListByChainId( chainId: Hex, abortSignal: AbortSignal, { timeout = defaultTimeout } = {}, -): Promise { - const tokenURL = getTokensURL(chainId); - const response = await queryApi(tokenURL, abortSignal, timeout); - if (response) { +): Promise { + // TODO: We really need to move away from fetching all tokens at once + // This is expensive - uses up a lot of memory and bandwidth + // Need to discuss how we can fully deprecate this - many areas require this metadata (decimals, icon, rwaData) + const allTokens: GetTokensUrlResponse['data'] = []; + let nextCursor: string | undefined; + + // If we are still fetching tokens past 10 pages of 3000 tokens (30000), + // then we really need to re-evaluate our approach + const hardPaginationLimit = 10; + let paginationCount = 1; + + do { + const tokenURL = getTokensURL(chainId, nextCursor); + const response = await queryApi(tokenURL, abortSignal, timeout); + if (!response) { + break; + } + const result = await parseJsonResponse(response); - if (Array.isArray(result) && chainId === ChainId['linea-mainnet']) { - return result.filter( - (elm) => - Boolean(elm.aggregators.includes('lineaTeam')) || - elm.aggregators.length >= 3, - ); + + // Ensure result is typed with GetTokensUrlResponse and handles pagination + if ( + result && + typeof result === 'object' && + 'data' in result && + Array.isArray(result.data) + ) { + const typedResult = result as GetTokensUrlResponse; + + allTokens.push(...typedResult.data); + + nextCursor = typedResult.pageInfo.hasNextPage + ? typedResult.pageInfo.endCursor + : undefined; } - return result; + paginationCount += 1; + } while (nextCursor && paginationCount <= hardPaginationLimit); + + if (paginationCount >= hardPaginationLimit) { + console.warn( + `TokenService: Token list pagination limit reached for chainId ${chainId}`, + ); + return allTokens; } - return undefined; -} -export type TokenRwaData = { - market?: { - nextOpen?: string; - nextClose?: string; - }; - nextPause?: { - start?: string; - end?: string; - }; - ticker?: string; - instrumentType?: string; -}; + // Special filter logic for linea-mainnet (preserved from original) + if (chainId === ChainId['linea-mainnet']) { + return allTokens.filter( + (elm) => + Boolean(elm.aggregators?.includes('lineaTeam')) || + (elm.aggregators && elm.aggregators.length >= 3), + ); + } + + return allTokens; +} export type TokenSearchItem = { assetId: CaipAssetType; @@ -348,21 +470,47 @@ export async function getTrendingTokens({ * @param options.timeout - The fetch timeout. * @returns The token metadata, or `undefined` if the request was either aborted or failed. 
*/ -export async function fetchTokenMetadata( +export async function fetchTokenMetadata( chainId: Hex, tokenAddress: string, abortSignal: AbortSignal, { timeout = defaultTimeout } = {}, -): Promise { +): Promise { if (!isTokenListSupportedForNetwork(chainId)) { throw new Error(TOKEN_METADATA_NO_SUPPORT_ERROR); } const tokenMetadataURL = getTokenMetadataURL(chainId, tokenAddress); const response = await queryApi(tokenMetadataURL, abortSignal, timeout); - if (response) { - return parseJsonResponse(response) as Promise; + if (!response) { + return undefined; } - return undefined; + + const result = await parseJsonResponse(response); + if (!result || !Array.isArray(result)) { + return undefined; + } + + const typedResult = result as GetTokenMetadataUrlResponse; + const singleToken = typedResult.at(0); + if (!singleToken) { + return undefined; + } + + const tokenMetadata: EVMTokenMetadata = { + name: singleToken.name, + symbol: singleToken.symbol, + decimals: singleToken.decimals, + address: tokenAddress, + aggregators: singleToken.aggregators, + occurrences: singleToken.aggregators?.length ?? 0, + iconUrl: formatIconUrlWithProxy({ + chainId, + tokenAddress, + }), + rwaData: singleToken.rwaData, + }; + + return tokenMetadata; } /** From 92bf9cb5fdd96256c6f7d9977b429186ce07b1bf Mon Sep 17 00:00:00 2001 From: Prithpal Sooriya Date: Wed, 7 Jan 2026 22:21:46 +0000 Subject: [PATCH 2/5] test: fix UTs --- .../src/TokenListController.test.ts | 137 +++++++++++++----- .../src/TokenListController.ts | 5 +- 2 files changed, 108 insertions(+), 34 deletions(-) diff --git a/packages/assets-controllers/src/TokenListController.test.ts b/packages/assets-controllers/src/TokenListController.test.ts index b6a03aa7b52..3faba992f8b 100644 --- a/packages/assets-controllers/src/TokenListController.test.ts +++ b/packages/assets-controllers/src/TokenListController.test.ts @@ -30,6 +30,8 @@ import { buildInfuraNetworkClientConfiguration, buildMockGetNetworkClientById, } from '../../network-controller/tests/helpers'; +import { MOCK_ETHEREUM_TOKENS_METADATA } from './__fixtures__/tokens-api-mocks'; +import { clone } from 'lodash'; const namespace = 'TokenListController'; const timestamp = Date.now(); @@ -234,7 +236,6 @@ const sampleSepoliaTokenList = [ name: 'Wrapped BTC', iconUrl: 'https://static.cx.metamask.io/api/v1/tokenIcons/11155111/0x2260fac5e5542a773aa44fbcfedf7c193bc2c599.png', - type: 'erc20', aggregators: [ 'Metamask', 'Aave', @@ -253,10 +254,6 @@ const sampleSepoliaTokenList = [ 'Coinmarketcap', ], occurrences: 15, - fees: {}, - storage: { - balance: 0, - }, }, { address: '0x04fa0d235c4abf4bcf4787af4cf447de572ef828', @@ -265,7 +262,6 @@ const sampleSepoliaTokenList = [ name: 'UMA', iconUrl: 'https://static.cx.metamask.io/api/v1/tokenIcons/11155111/0x04fa0d235c4abf4bcf4787af4cf447de572ef828.png', - type: 'erc20', aggregators: [ 'Metamask', 'Bancor', @@ -282,7 +278,6 @@ const sampleSepoliaTokenList = [ 'Coinmarketcap', ], occurrences: 13, - fees: {}, }, { address: '0x6810e776880c02933d47db1b9fc05908e5386b96', @@ -291,7 +286,6 @@ const sampleSepoliaTokenList = [ name: 'Gnosis Token', iconUrl: 'https://static.cx.metamask.io/api/v1/tokenIcons/11155111/0x6810e776880c02933d47db1b9fc05908e5386b96.png', - type: 'erc20', aggregators: [ 'Metamask', 'Bancor', @@ -307,10 +301,44 @@ const sampleSepoliaTokenList = [ 'Coinmarketcap', ], occurrences: 12, - fees: {}, }, ]; +const createNockEndpoint = ( + chainId: number, + opts?: { + nockIntercept?: ( + intercept: nock.Interceptor, + ) => nock.Interceptor | nock.Scope; + 
queryParams?: Record; + response?: unknown; + }, +): nock.Scope => { + const nockPartial = nock(tokenService.TOKENS_END_POINT_API) + .get(`/tokens/${chainId}`) + .query({ + occurrenceFloor: '3', + includeTokenFees: 'false', + includeAssetType: 'false', + includeERC20Permit: 'false', + includeStorage: 'false', + includeAggregators: 'true', + includeOccurrences: 'true', + includeIconUrl: 'true', + includeRwaData: 'true', + first: '3000', + ...opts?.queryParams, + }); + + const finalNock = opts?.nockIntercept?.(nockPartial) ?? nockPartial; + + return 'isDone' in finalNock + ? finalNock + : finalNock + .reply(200, opts?.response ?? MOCK_ETHEREUM_TOKENS_METADATA) + .persist(); +}; + const sampleSepoliaTokensChainCache = sampleSepoliaTokenList.reduce((output, current) => { output[current.address] = current; @@ -606,10 +634,14 @@ describe('TokenListController', () => { }); it('should update tokensChainsCache state when network updates are passed via onNetworkStateChange callback', async () => { - nock(tokenService.TOKEN_END_POINT_API) - .get(getTokensPath(ChainId.mainnet)) - .reply(200, sampleMainnetTokenList) - .persist(); + const mainnetEndpointResponse = clone(MOCK_ETHEREUM_TOKENS_METADATA); + mainnetEndpointResponse.data = sampleMainnetTokenList; + const mainnetEndpoint = createNockEndpoint( + convertHexToDecimal(ChainId.mainnet), + { + response: mainnetEndpointResponse, + }, + ); jest.spyOn(Date, 'now').mockImplementation(() => 100); const selectedNetworkClientId = 'selectedNetworkClientId'; @@ -718,6 +750,7 @@ describe('TokenListController', () => { }, '0x539': { timestamp: 100, data: {} }, }); + expect(mainnetEndpoint.isDone()).toBe(true); controller.destroy(); }); @@ -851,10 +884,14 @@ describe('TokenListController', () => { }); it('should update tokensChainsCache from api', async () => { - nock(tokenService.TOKEN_END_POINT_API) - .get(getTokensPath(ChainId.mainnet)) - .reply(200, sampleMainnetTokenList) - .persist(); + const mainnetEndpointResponse = clone(MOCK_ETHEREUM_TOKENS_METADATA); + mainnetEndpointResponse.data = sampleMainnetTokenList; + const mainnetEndpoint = createNockEndpoint( + convertHexToDecimal(ChainId.mainnet), + { + response: mainnetEndpointResponse, + }, + ); const messenger = getMessenger(); const restrictedMessenger = getRestrictedMessenger(messenger); @@ -879,6 +916,7 @@ describe('TokenListController', () => { ).toBeGreaterThanOrEqual( sampleSingleChainState.tokensChainsCache[ChainId.mainnet].timestamp, ); + expect(mainnetEndpoint.isDone()).toBe(true); controller.destroy(); } finally { controller.destroy(); @@ -939,10 +977,14 @@ describe('TokenListController', () => { }); it('should update the cache when the timestamp expires', async () => { - nock(tokenService.TOKEN_END_POINT_API) - .get(getTokensPath(ChainId.mainnet)) - .reply(200, sampleMainnetTokenList) - .persist(); + const mainnetEndpointResponse = clone(MOCK_ETHEREUM_TOKENS_METADATA); + mainnetEndpointResponse.data = sampleMainnetTokenList; + const mainnetEndpoint = createNockEndpoint( + convertHexToDecimal(ChainId.mainnet), + { + response: mainnetEndpointResponse, + }, + ); const messenger = getMessenger(); const restrictedMessenger = getRestrictedMessenger(messenger); @@ -965,22 +1007,46 @@ describe('TokenListController', () => { ).toStrictEqual( sampleSingleChainState.tokensChainsCache[ChainId.mainnet].data, ); + expect(mainnetEndpoint.isDone()).toBe(true); controller.destroy(); }); it('should update tokensChainsCache when the chainId change', async () => { - nock(tokenService.TOKEN_END_POINT_API) - 
.get(getTokensPath(ChainId.mainnet)) - .reply(200, sampleMainnetTokenList) - .get(getTokensPath(ChainId.sepolia)) - .reply(200, { - error: `ChainId ${convertHexToDecimal( - ChainId.sepolia, - )} is not supported`, - }) - .get(getTokensPath(toHex(56))) - .reply(200, sampleBinanceTokenList) - .persist(); + const sepoliaEndpoint = createNockEndpoint( + convertHexToDecimal(ChainId.sepolia), + { + nockIntercept: (intercept) => + intercept.reply(200, { + error: `ChainId ${convertHexToDecimal( + ChainId.sepolia, + )} is not supported`, + }), + }, + ); + const binanceEndpointResponse = clone(MOCK_ETHEREUM_TOKENS_METADATA); + binanceEndpointResponse.data = sampleBinanceTokenList; + const binanceEndpoint = createNockEndpoint(56, { + response: binanceEndpointResponse, + }); + + type Endpoint = 'sepolia' | 'binance'; + const assertEndpointCalls = (opts: { + done: Endpoint[]; + notDone: Endpoint[]; + }): void => { + const endpointMap = { + sepolia: sepoliaEndpoint, + binance: binanceEndpoint, + }; + + opts.done.forEach((endpoint) => { + expect(endpointMap[endpoint].isDone()).toBe(true); + }); + opts.notDone.forEach((endpoint) => { + expect(endpointMap[endpoint].isDone()).toBe(false); + }); + }; + const selectedCustomNetworkClientId = 'selectedCustomNetworkClientId'; const messenger = getMessenger(); const getNetworkClientById = buildMockGetNetworkClientById({ @@ -1012,6 +1078,7 @@ describe('TokenListController', () => { sampleTwoChainState.tokensChainsCache[ChainId.mainnet].data, ); + // Change to sepolia messenger.publish( 'NetworkController:stateChange', { @@ -1032,6 +1099,8 @@ describe('TokenListController', () => { sampleTwoChainState.tokensChainsCache[ChainId.mainnet].data, ); + assertEndpointCalls({ done: ['sepolia'], notDone: ['binance'] }); + messenger.publish( 'NetworkController:stateChange', { @@ -1052,6 +1121,8 @@ describe('TokenListController', () => { sampleTwoChainState.tokensChainsCache[ChainId.mainnet].data, ); + assertEndpointCalls({ done: ['sepolia', 'binance'], notDone: [] }); + expect(controller.state.tokensChainsCache[toHex(56)].data).toStrictEqual( sampleTwoChainState.tokensChainsCache[toHex(56)].data, ); diff --git a/packages/assets-controllers/src/TokenListController.ts b/packages/assets-controllers/src/TokenListController.ts index b344f4a35ed..14ab426a63f 100644 --- a/packages/assets-controllers/src/TokenListController.ts +++ b/packages/assets-controllers/src/TokenListController.ts @@ -328,8 +328,11 @@ export class TokenListController extends StaticIntervalPollingController { From 38817a6a59e25984a1b24e173fd717721e5b07b5 Mon Sep 17 00:00:00 2001 From: Prithpal Sooriya Date: Wed, 7 Jan 2026 22:28:17 +0000 Subject: [PATCH 3/5] refactor: cleanup --- packages/assets-controllers/src/token-service.test.ts | 3 +++ packages/assets-controllers/src/token-service.ts | 10 ++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/assets-controllers/src/token-service.test.ts b/packages/assets-controllers/src/token-service.test.ts index 946d47a76eb..9cc51f1054e 100644 --- a/packages/assets-controllers/src/token-service.test.ts +++ b/packages/assets-controllers/src/token-service.test.ts @@ -244,10 +244,13 @@ describe('Token service', () => { // Assert first and last endpoint calls expect(nockEndpoints[0].isDone()).toBe(true); // page 1 is called + expect(nockEndpoints[9].isDone()).toBe(true); // page 10 is called + expect(nockEndpoints[10].isDone()).toBe(false); // page 11 is never called expect(nockEndpoints[19].isDone()).toBe(false); // page 20 is never called // 
Assert all endpoints calls nockEndpoints.forEach((endpoint, index) => { + // 10 pages, so index 0 to 9 are done, 10 is never called const isDone = index < 10; expect(endpoint.isDone()).toBe(isDone); }); diff --git a/packages/assets-controllers/src/token-service.ts b/packages/assets-controllers/src/token-service.ts index 2e7bac3f8e1..108946c5f88 100644 --- a/packages/assets-controllers/src/token-service.ts +++ b/packages/assets-controllers/src/token-service.ts @@ -252,7 +252,7 @@ export async function fetchTokenListByChainId( // If we are still fetching tokens past 10 pages of 3000 tokens (30000), // then we really need to re-evaluate our approach const hardPaginationLimit = 10; - let paginationCount = 1; + let paginationCount = 0; do { const tokenURL = getTokensURL(chainId, nextCursor); @@ -263,7 +263,6 @@ export async function fetchTokenListByChainId( const result = await parseJsonResponse(response); - // Ensure result is typed with GetTokensUrlResponse and handles pagination if ( result && typeof result === 'object' && @@ -274,12 +273,12 @@ export async function fetchTokenListByChainId( allTokens.push(...typedResult.data); - nextCursor = typedResult.pageInfo.hasNextPage - ? typedResult.pageInfo.endCursor + nextCursor = typedResult?.pageInfo?.hasNextPage + ? typedResult?.pageInfo?.endCursor : undefined; } paginationCount += 1; - } while (nextCursor && paginationCount <= hardPaginationLimit); + } while (nextCursor && paginationCount < hardPaginationLimit); if (paginationCount >= hardPaginationLimit) { console.warn( @@ -288,7 +287,6 @@ export async function fetchTokenListByChainId( return allTokens; } - // Special filter logic for linea-mainnet (preserved from original) if (chainId === ChainId['linea-mainnet']) { return allTokens.filter( (elm) => From 0e9c6a8e0368a8209b5162bfe2f3a0cf8d8a0253 Mon Sep 17 00:00:00 2001 From: Prithpal Sooriya Date: Fri, 9 Jan 2026 09:27:41 +0000 Subject: [PATCH 4/5] refactor: clean up linting errors --- packages/assets-controllers/src/TokenListController.test.ts | 4 ++-- packages/assets-controllers/src/TokensController.test.ts | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/assets-controllers/src/TokenListController.test.ts b/packages/assets-controllers/src/TokenListController.test.ts index 3faba992f8b..363bb5af960 100644 --- a/packages/assets-controllers/src/TokenListController.test.ts +++ b/packages/assets-controllers/src/TokenListController.test.ts @@ -14,9 +14,11 @@ import type { } from '@metamask/messenger'; import type { NetworkState } from '@metamask/network-controller'; import type { Hex } from '@metamask/utils'; +import { clone } from 'lodash'; import nock from 'nock'; import * as sinon from 'sinon'; +import { MOCK_ETHEREUM_TOKENS_METADATA } from './__fixtures__/tokens-api-mocks'; import * as tokenService from './token-service'; import type { TokenListMap, @@ -30,8 +32,6 @@ import { buildInfuraNetworkClientConfiguration, buildMockGetNetworkClientById, } from '../../network-controller/tests/helpers'; -import { MOCK_ETHEREUM_TOKENS_METADATA } from './__fixtures__/tokens-api-mocks'; -import { clone } from 'lodash'; const namespace = 'TokenListController'; const timestamp = Date.now(); diff --git a/packages/assets-controllers/src/TokensController.test.ts b/packages/assets-controllers/src/TokensController.test.ts index d21815f727d..73008c861d4 100644 --- a/packages/assets-controllers/src/TokensController.test.ts +++ b/packages/assets-controllers/src/TokensController.test.ts @@ -10,7 +10,6 @@ import { ApprovalType, ChainId, 
   ORIGIN_METAMASK,
-  convertHexToDecimal,
   InfuraNetworkType,
 } from '@metamask/controller-utils';
 import type { InternalAccount } from '@metamask/keyring-internal-api';

From 3bd89c3ef5f33f2c9f269ac0df9c24e0ed8ff843 Mon Sep 17 00:00:00 2001
From: Prithpal Sooriya
Date: Fri, 9 Jan 2026 09:45:53 +0000
Subject: [PATCH 5/5] feat: move to prod endpoint

---
 packages/assets-controllers/src/token-service.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/assets-controllers/src/token-service.ts b/packages/assets-controllers/src/token-service.ts
index 108946c5f88..981b76069c3 100644
--- a/packages/assets-controllers/src/token-service.ts
+++ b/packages/assets-controllers/src/token-service.ts
@@ -21,7 +21,7 @@ import {
 } from './assetsUtil';
 
 export const TOKEN_END_POINT_API = 'https://token.api.cx.metamask.io';
-export const TOKENS_END_POINT_API = 'https://tokens.dev-api.cx.metamask.io';
+export const TOKENS_END_POINT_API = 'https://tokens.api.cx.metamask.io';
 export const TOKEN_METADATA_NO_SUPPORT_ERROR =
   'TokenService Error: Network does not support fetchTokenMetadata';
 