Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature resize images #27

Open
wants to merge 15 commits into
base: main
Choose a base branch
from
Open
2 changes: 2 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,5 @@ IPFS_PIN_JWT=asdf
MAX_PROMISES_PER_MINUTE=1000
IPFS_PIN_MAX_PROMISES_PER_MINUTE=60
POSTGRAPHILE_SERVE_PORT=6001
MAX_IMAGEMAGIC_PER_MINUTE=300
MAX_CONCURRENT_IMAGEMAGIC=20
2 changes: 2 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
{
}
100,252 changes: 76,431 additions & 23,821 deletions db/db_dump

Large diffs are not rendered by default.

8 changes: 6 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
"license": "ISC",
"devDependencies": {
"@babel/core": "^7.17.9",
"@types/imagemagick": "^0.0.31",
"@types/lodash": "^4.14.181",
"@types/node": "^17.0.35",
"@types/prompt": "^1.1.2",
Expand All @@ -31,8 +32,8 @@
"babel-plugin-module-resolver": "^4.1.0",
"eslint": "^8.16.0",
"eslint-import-resolver-babel-module": "^5.3.1",
"eslint-plugin-unused-imports": "^2.0.0",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-unused-imports": "^2.0.0",
"nodemon": "^2.0.15",
"ts-node": "^10.7.0",
"typechain": "^8.0.0",
Expand All @@ -47,6 +48,8 @@
"ethers": "^5.6.2",
"graphql": "^16.3.0",
"graphql-request": "^4.2.0",
"imagemagick": "^0.1.3",
"ipfs-http-client": "^56.0.3",
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

i decided to use this just because it was easier for me, happy to use a non-sugar client, i see you mostly interact with axios

"knex": "^1.0.7",
"knex-dump": "^1.0.11",
"lodash": "^4.17.21",
Expand All @@ -56,6 +59,7 @@
"prompt": "^1.2.2",
"slugify": "^1.6.5",
"sqlite3": "^5.0.3",
"temp-dir": "^2.0.0",
"yargs": "^17.4.0"
}
}
}
1 change: 1 addition & 0 deletions src/clients/ethereum.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ const init = async (): Promise<EthClient> => {
const iface = new ethers.utils.Interface(MetaABI.abi);
return events.map((event: ethers.Event) => ({
...iface.parseLog(event),
logIndex: BigNumber.from(event.logIndex).toString(),
blockNumber: BigNumber.from(event.blockNumber).toString(),
blockHash: event.blockHash,
address: event.address
Expand Down
78 changes: 71 additions & 7 deletions src/clients/sound.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
import { gql, GraphQLClient } from 'graphql-request';

import { formatAddress } from '../types/address';
import { ERC721NFT } from '../types/erc721nft';

const soundAPI = new GraphQLClient(
'https://api.sound.xyz/graphql',
);

const mapAPITrackToTrackID = (apiTrack: any): string => {
if(!apiTrack) {
if (!apiTrack) {
throw new Error('Missing sound.xyz api track');
}
if(!apiTrack.artist || !apiTrack.artist.artistContractAddress) {
if (!apiTrack.artist || !apiTrack.artist.artistContractAddress) {
throw new Error('Missing sound.xyz api track artist');
}
if(!apiTrack.editionId) {
if (!apiTrack.editionId) {
throw new Error('Missing sound.xyz api track editionId');
}
return `ethereum/${formatAddress(apiTrack.artist.artistContractAddress)}/${apiTrack.editionId}`
Expand Down Expand Up @@ -44,10 +45,10 @@ const init = async () => {
};
const getAllMintedReleasesFunction = async (
): Promise<any[]> => {
const { getAllMintedReleases } = await soundAPI.request(
const { allMintedReleases } = await soundAPI.request(
gql`
{
getAllMintedReleases {
allMintedReleases {
id
createdAt
title
Expand Down Expand Up @@ -80,8 +81,69 @@ const init = async () => {
}
`,
);
return getAllMintedReleases;
return allMintedReleases;
};

// Extracts the track title from an NFT's metadata name, which is expected
// to look like "<title> #<edition>". Logs the offending NFT and throws when
// metadata is absent, the name is missing, or the name does not contain
// exactly one '#'.
const getNFTTitle = (nft: ERC721NFT) => {
  const metadata = nft.metadata;
  if (!metadata) {
    console.error({ nft })
    throw new Error('Missing nft metadata');
  }
  const name = metadata.name;
  if (!name) {
    console.error({ nft })
    throw new Error('Missing name');
  }
  const parts = name.split('#');
  if (parts.length !== 2) {
    console.error({ nft })
    throw new Error('Name split by # failed');
  }
  return parts[0].trim();
}

// True when the API track is on the same artist contract as the NFT and its
// (trimmed) title equals the title parsed from the NFT's metadata name.
// Both comparisons are computed eagerly (no short-circuit) so that
// getNFTTitle's validation errors still surface for non-matching artists,
// exactly as before.
const nftMatchesTrack = (nft: ERC721NFT, apiTrack: any) => {
  const artistMatches = formatAddress(apiTrack.artist.artistContractAddress) === formatAddress(nft.contractAddress);
  const titleMatches = apiTrack.title.trim() === getNFTTitle(nft);
  return artistMatches && titleMatches;
}

/**
 * Maps each NFT to its sound.xyz track ID.
 *
 * Fetches all minted releases from the sound.xyz API, keeps only releases
 * matching one of the given NFTs (same artist contract and title), resolves
 * audio for each matching release, and returns a map of nft.id -> trackId.
 *
 * Throws when a matching release has more than one track (not yet
 * implemented) or when an NFT cannot be matched to any release.
 */
const fetchTracksByNFT = async (nfts: ERC721NFT[]) => {
  const apiResponse = await getAllMintedReleasesFunction();
  const apiTracks = apiResponse.map(apiTrack => ({
    ...apiTrack,
    trackId: mapAPITrackToTrackID(apiTrack),
  }))

  const filteredAPITracks = apiTracks.filter(apiTrack =>
    nfts.some((nft: ERC721NFT) => nftMatchesTrack(nft, apiTrack))
  );
  // BUG FIX: the previous version returned an { isError, error } object from
  // inside forEach; forEach discards callback return values, so unsupported
  // multi-track releases were silently ignored. Fail loudly instead.
  filteredAPITracks.forEach(apiTrack => {
    if (apiTrack.tracks.length > 1) {
      throw new Error('Sound release with multiple tracks not yet implemented');
    }
  });
  // Attach audio to each release's single track, in parallel.
  const audioAPITrackPromises = filteredAPITracks.map(async apiTrack => ({
    ...apiTrack,
    tracks: [{
      ...apiTrack.tracks[0],
      audio: await audioFromTrack(apiTrack.tracks[0].id),
    }]
  }));
  const audioAPITracks = await Promise.all(audioAPITrackPromises);
  return nfts.reduce((accum, nft) => {
    const nftTrack = audioAPITracks.find(track => nftMatchesTrack(nft, track));
    if (!nftTrack || !nftTrack.trackId) {
      console.dir({ nftTrack, nft })
      throw new Error('No track found for NFT')
    }
    accum[nft.id] = nftTrack.trackId;
    return accum;
  }, {} as any);
};

const fetchTracksByTrackId = async (trackIds: string[]) => {
const apiResponse = await getAllMintedReleasesFunction();
const apiTracks = apiResponse.map(apiTrack => ({
Expand All @@ -106,10 +168,12 @@ const init = async () => {
const audioAPITracks = await Promise.all(audioAPITrackPromises);
return audioAPITracks;
};

return {
audioFromTrack,
getAllMintedReleases: getAllMintedReleasesFunction,
fetchTracksByTrackId
fetchTracksByTrackId,
fetchTracksByNFT
};
}

Expand Down
13 changes: 8 additions & 5 deletions src/db/db.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import { Knex } from 'knex';

import { Cursor } from '../types/trigger'

export enum Table {
platforms = 'platforms',
erc721nfts = 'erc721nfts',
erc721Transfers = 'erc721Transfers',
artists = 'artists',
artistProfiles = 'artistProfiles',
processedTracks = 'processedTracks',
Expand All @@ -13,14 +15,15 @@ export enum Table {
erc721nfts_processedTracks = 'erc721nfts_processedTracks',
erc721nftProcessErrors = 'erc721nftProcessErrors',
ipfsPins = 'ipfsPins',
processedArtworks = 'processedArtworks'
}

export type WhereFunc = 'where'
| 'whereNull'
| 'andWhere'
| 'whereJsonPath'
| 'whereNotNull'
| 'whereIn';
| 'whereNull'
| 'andWhere'
| 'whereJsonPath'
| 'whereNotNull'
| 'whereIn';

export type WhereField = 'and';

Expand Down
25 changes: 25 additions & 0 deletions src/db/migrations/12-track-all-erc721-transfers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { Knex } from 'knex';

import { Table } from '../db';

const tableConfig =
{
name: Table.erc721Transfers, create: (table: Knex.CreateTableBuilder) => {
table.string('id').primary();
table.datetime('createdAtTime', { precision: 3 });
table.bigint('createdAtEthereumBlockNumber');
table.string('from');
table.string('to');
table.string('contractAddress');
table.string('tokenId');
}
};

export const up = async (knex: Knex) => {
await knex.schema.createTable(tableConfig.name, tableConfig.create);
await knex.raw(`GRANT SELECT ON "${Table.erc721Transfers}" TO ${process.env.POSTGRES_USERNAME_OPEN}`);
};

exports.down = async (knex: Knex) => {
await knex.schema.dropTable(tableConfig.name);
}
19 changes: 19 additions & 0 deletions src/db/migrations/20220528044344_processedArtworks.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import { Knex } from 'knex';
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

i see u created this migration properly with timestamp - need to rename the other migrations to have timestamp too, as they're currently just in numeric order so could clash with timestamp ones - will do before merging this PR

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

haha ye i saw, was going to ask if i need to make mine the custom numbers


import { Table } from '../db';

export const up = async (knex: Knex) => {
console.log('Running create contracts bootstrap');
await knex.schema.createTable(Table.processedArtworks, (table: Knex.CreateTableBuilder) => {
table.increments('id');
Copy link
Contributor

@musnit musnit May 30, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hmm not sure if it should have an increment id? trackId should be unique and could just be used as the primary key, as we do in other places - don't think there are any autoincrementing ids anywhere else.

one of the motivation & requirements for no autoincrementing ids is to ensure that if we open source this and someone else runs it with their own endpoint, their DB looks the same and has the same ids as ours. With autoincrementing ids, this can't be guaranteed, for example consider each of the below scenarios:

  • we run the db
  • we add sound, catalog
  • we process sound tracks + catalog tracks on tuesday
  • on tuesday process sound tracks+catalog tracks from tuesday and we add noizd and process noizd tracks from monday and tuesday
  • on weds we process tracks from all 3

2 weeks later, someone else runs the thing:

  • they process all sound+catalog+noizd tracks from tuesday and wednesday

the other person will end up with a different order of IDs, since our initial run had the monday noizd tracks mixed in.

the current design has a nice property across all tables where the order of all operations in the pipeline is basically irrelevant and 2 different people running it will eventually converge on to mostly the same state and same db contents (with some minor buggy exceptions probably)

so this also means that if we add a peer-to-peer network at some point in future where different nodes share and sync their data, it will be easy to do so if everyone is on the same page about ordering, as there will be no need for debate or consensus between nodes on ordering.

(this motivation/requirement may not be that important tbh, could be a bit overkill/premature optimization, but given we've got this property already and it's easy to preserve for now, i think worth preserving)

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ye i figured this would be a problem based on the other code that i read through :). was being a bit lazy. i can't use trackId since there are multiple images per track, but i could follow your format that makes ids deterministic

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ye i mean it may make sense to actually process images from the nft directly rather than from the track, and use the nft id too. would need to handle the case where multiple nfts all have the same image tho.

table.string('error');
table.string('size');
table.string('trackId');
table.string('cid');
});
await knex.raw(`GRANT SELECT ON "${Table.processedArtworks}" TO ${process.env.POSTGRES_USERNAME_OPEN}`);
};

exports.down = async (knex: Knex) => {
await knex.schema.dropTable(Table.processedArtworks);
}
8 changes: 6 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,16 +6,17 @@ import { Table } from './db/db';
import db from './db/sql-db';
import { addMetadataIPFSHashProcessor } from './processors/default/addMetadataIPFSHash';
import { addMetadataObjectProcessor } from './processors/default/addMetadataObject';
import { addTimestampToERC721NFTs } from './processors/default/addTimestampToERC721NFTs';
import { addTimestampToERC721NFTs, addTimestampToERC721Transfers } from './processors/default/addTimestampToERC721NFTs';
import { categorizeZora } from './processors/default/categorizeZora';
import { createERC721ContractFromFactoryProcessor } from './processors/default/createERC721ContractFromFactoryProcessor';
import { createERC721NFTsFromTransfersProcessor } from './processors/default/createERC721NFTsFromTransfersProcessor';
import { createProcessedTracksFromAPI } from './processors/default/createProcessedTracksFromAPI';
import { stripIgnoredNFTs, stripNonAudio } from './processors/default/deleter';
import { getERC721ContractFieldsProcessor } from './processors/default/getERC721ContractFieldsProcessor';
import { getERC721TokenFieldsProcessor } from './processors/default/getERC721TokenFieldsProcessor';
import { ipfsAudioPinner, ipfsArtworkPinner } from './processors/default/ipfs';
import { ipfsAudioPinner, ipfsArtworkPinner, ipfsProcessedArtworksPinner } from './processors/default/ipfs';
import { processPlatformTracks } from './processors/default/processPlatformTracks';
import { processTrackArtworks } from './processors/default/processTrackArtworks';
import { runProcessors } from './runner';
import { ERC721Contract, FactoryContract } from './types/ethereum';
import { MusicPlatform } from './types/platform';
Expand All @@ -33,6 +34,7 @@ const PROCESSORS = (erc721Contracts: ERC721Contract[], factoryContracts: Factory
getERC721ContractFieldsProcessor,
erc721TransferProcessors,
stripIgnoredNFTs,
addTimestampToERC721Transfers,
addTimestampToERC721NFTs,
getERC721TokenFieldsProcessor(erc721ContractsByAddress),
addMetadataIPFSHashProcessor(erc721ContractsByAddress),
Expand All @@ -41,8 +43,10 @@ const PROCESSORS = (erc721Contracts: ERC721Contract[], factoryContracts: Factory
categorizeZora,
...platformTrackProcessors,
createProcessedTracksFromAPI('noizd'), //TODO: noizd here is being used both as platformId and MusicPlatformType. Need to bring in the full noizd platform object here and avoid mixing them
processTrackArtworks,
ipfsAudioPinner,
ipfsArtworkPinner,
ipfsProcessedArtworksPinner,
]
};

Expand Down
29 changes: 18 additions & 11 deletions src/processors/default/addTimestampToERC721NFTs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,28 +2,35 @@ import _ from 'lodash';

import { Table } from '../../db/db';
import { missingCreatedAtTime } from '../../triggers/missing';
import { ERC721NFT } from '../../types/erc721nft';
import { Clients } from '../../types/processor';
import { Record } from '../../types/record';

const processorFunction = async (items: ERC721NFT, clients: Clients) => {
const nftsByBlockNumber = _.groupBy(items, 'createdAtEthereumBlockNumber');
const blockNumbers = Object.keys(nftsByBlockNumber);
const processorFunction = (table: Table) => async (items: Record, clients: Clients) => {
const recordsByBlockNumber = _.groupBy(items, 'createdAtEthereumBlockNumber');
const blockNumbers = Object.keys(recordsByBlockNumber);
console.log(`Processing until block ${blockNumbers[blockNumbers.length - 1]}`)
const timestampsResponse = await clients.blocks.fetchBlockTimestamps(blockNumbers);
const responseByBlockNumber = _.keyBy(timestampsResponse, 'number');
const nftUpdates: Partial<ERC721NFT>[] = [];
const recordUpdates: Partial<Record>[] = [];
blockNumbers.forEach((blockNumber) => {
const timestampMillis = BigInt(responseByBlockNumber[blockNumber].timestamp) * BigInt(1000);
const nfts: ERC721NFT[] = nftsByBlockNumber[blockNumber] as any;
nfts.forEach(nft => nftUpdates.push({
id: nft.id,
const records: Record[] = recordsByBlockNumber[blockNumber] as any;
records.forEach(record => recordUpdates.push({
id: record.id,
createdAtTime: new Date(parseInt(timestampMillis.toString())),
}));
})
await clients.db.update(Table.erc721nfts, nftUpdates);
await clients.db.update(table, recordUpdates);
};

// Processor config: backfills createdAtTime for NFTs (erc721nfts table),
// triggered by rows whose createdAtTime is still missing.
export const addTimestampToERC721NFTs = {
  name: 'addTimestampToERC721NFTs',
  trigger: missingCreatedAtTime(Table.erc721nfts),
  processorFunction: processorFunction(Table.erc721nfts),
};

// Processor config: same backfill for raw transfer events
// (erc721Transfers table), sharing the table-parameterized trigger and
// processor above.
export const addTimestampToERC721Transfers = {
  name: 'addTimestampToERC721Transfers',
  trigger: missingCreatedAtTime(Table.erc721Transfers),
  processorFunction: processorFunction(Table.erc721Transfers),
};
16 changes: 14 additions & 2 deletions src/processors/default/createERC721NFTsFromTransfersProcessor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,25 +4,36 @@ import _ from 'lodash';
import { Table } from '../../db/db';
import { newERC721Transfers } from '../../triggers/newNFTContractEvent';
import { formatAddress } from '../../types/address';
import { ERC721NFT } from '../../types/erc721nft';
import { ERC721NFT, ERC721Transfer } from '../../types/erc721nft';
import { ERC721Contract, ETHEREUM_NULL_ADDRESS, NFTContractTypes } from '../../types/ethereum';
import { Clients, Processor } from '../../types/processor';
import { Cursor } from '../../types/trigger';

const NAME = 'createERC721NFTsFromTransfers';

const CHAIN = 'ethereum';

const processorFunction = (contracts: ERC721Contract[]) =>
async ({ newCursor, items }: { newCursor: Cursor, items: ethers.Event[] }, clients: Clients) => {
const contractsByAddress = _.keyBy(contracts, 'address');
const newNFTs: Partial<ERC721NFT>[] = [];
const updates: Partial<ERC721NFT>[] = [];
const transfers: Partial<ERC721Transfer>[] = [];
items.forEach((item): Partial<ERC721NFT> | undefined => {
const address = item.address;
const contract = contractsByAddress[address];
const contractTypeName = contract.contractType;
const contractType = NFTContractTypes[contractTypeName];
const tokenId = BigInt((item.args!.tokenId as BigNumber).toString());
const newMint = item.args!.from === ETHEREUM_NULL_ADDRESS;
transfers.push({
id: `${CHAIN}/${item.blockNumber}/${item.logIndex}`,
contractAddress: formatAddress(contract.address),
from: item.args!.from,
to: item.args!.to,
tokenId,
createdAtEthereumBlockNumber: '' + item.blockNumber,
});
if (!newMint) {
updates.push({
id: contractType.buildNFTId(contract.address, tokenId),
Expand All @@ -39,8 +50,9 @@ const processorFunction = (contracts: ERC721Contract[]) =>
owner: item.args!.to
});
});
await clients.db.update(Table.erc721nfts, updates);
await clients.db.insert(Table.erc721Transfers, transfers);
await clients.db.insert(Table.erc721nfts, newNFTs.filter(n => !!n));
await clients.db.update(Table.erc721nfts, updates);
await clients.db.updateProcessor(NAME, newCursor);
};

Expand Down
Loading