diff --git a/backend/app.js b/backend/app.js index 02a424e..d2099a7 100644 --- a/backend/app.js +++ b/backend/app.js @@ -1,39 +1,52 @@ -import 'dotenv/config' -import express from 'express' -import cors from 'cors' -import ipfsRoutes from './src/routes/ipfs.js' -import nftRoutes from './src/routes/nfts.js' +import 'dotenv/config'; +import express from 'express'; +import cors from 'cors'; +import { graphqlHTTP } from 'express-graphql'; +import { schema } from './src/graphql/schema.js'; // Import GraphQL schema +import { graphqlUploadExpress } from 'graphql-upload'; -const app = express() -const port = process.env.PORT || 3000 +const app = express(); +const port = process.env.PORT || 3000; -app.use(express.json()) +// Middleware to parse JSON +app.use(express.json()); -// Allow all origins for now to simplify the frontend setup +// Enable file uploads for GraphQL +app.use(graphqlUploadExpress()); + +// Allow all origins for now to simplify frontend setup app.use( cors({ origin: '*', - methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'], + methods: ['GET', 'POST', 'OPTIONS'], allowedHeaders: ['Content-Type', 'Authorization'], - }) -) + }) +); -app.use('/ipfs', ipfsRoutes) -app.use('/nfts', nftRoutes) +// Health check endpoint +app.get('/health', (req, res) => { + res.status(200).json({ status: 'ok' }); +}); -// Error handling middleware to ensure CORS headers are sent even on errors +// Set up the GraphQL server +app.use( + '/graphql', + graphqlHTTP({ + schema, // GraphQL schema imported from the project + graphiql: process.env.NODE_ENV !== 'production', // Enable GraphiQL in development + }) +); + +// Error handling middleware to ensure consistent error responses app.use((err, req, res, next) => { - res.header('Access-Control-Allow-Origin', '*') - res.status(err.status || 500).json({ error: err.message }) -}) -// Basic health check endpoint -app.get('/health', (req, res) => { - res.status(200).json({ status: 'ok' }) -}) + res.header('Access-Control-Allow-Origin', '*'); + 
res.status(err.status || 500).json({ error: err.message }); +}); +// Start the server app.listen(port, () => { - console.log(`Server is running on port ${port}`) -}) + console.log(`GraphQL server is running on http://localhost:${port}/graphql`); +}); -export default app \ No newline at end of file +export default app; diff --git a/backend/package-lock.json b/backend/package-lock.json index 20946a0..c25b7a0 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -10,9 +10,11 @@ "license": "ISC", "dependencies": { "cors": "^2.8.5", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "ethers": "^6.13.4", "express": "^4.21.1", + "express-graphql": "^0.12.0", + "graphql-upload": "^11.0.0", "multer": "^1.4.5-lts.1", "nodemon": "^3.1.7", "pako": "^2.1.0", @@ -515,6 +517,25 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/dicer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/dicer/-/dicer-0.3.0.tgz", + "integrity": "sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==", + "dependencies": { + "streamsearch": "0.1.2" + }, + "engines": { + "node": ">=4.5.0" + } + }, + "node_modules/dicer/node_modules/streamsearch": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", + "integrity": "sha512-jos8u++JKm0ARcSUTAZXOVC0mSox7Bhn6sBgty73P1f3JGf7yG2clTbBNHUdde/kdvP2FESam+vM6l8jBrNxHA==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/dns-packet": { "version": "5.6.1", "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", @@ -528,9 +549,9 @@ } }, "node_modules/dotenv": { - "version": "16.4.5", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", - "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "version": "16.4.7", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz", + "integrity": 
"sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==", "license": "BSD-2-Clause", "engines": { "node": ">=12" @@ -675,6 +696,68 @@ "node": ">= 0.10.0" } }, + "node_modules/express-graphql": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/express-graphql/-/express-graphql-0.12.0.tgz", + "integrity": "sha512-DwYaJQy0amdy3pgNtiTDuGGM2BLdj+YO2SgbKoLliCfuHv3VVTt7vNG/ZqK2hRYjtYHE2t2KB705EU94mE64zg==", + "deprecated": "This package is no longer maintained. We recommend using `graphql-http` instead. Please consult the migration document https://github.com/graphql/graphql-http#migrating-express-grpahql.", + "license": "MIT", + "dependencies": { + "accepts": "^1.3.7", + "content-type": "^1.0.4", + "http-errors": "1.8.0", + "raw-body": "^2.4.1" + }, + "engines": { + "node": ">= 10.x" + }, + "peerDependencies": { + "graphql": "^14.7.0 || ^15.3.0" + } + }, + "node_modules/express-graphql/node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express-graphql/node_modules/http-errors": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.0.tgz", + "integrity": "sha512-4I8r0C5JDhT5VkvI47QktDW75rNlGVsUf/8hzjCC/wkWI/jdTRmBb9aI7erSG82r1bjKY3F6k28WnsVxB1C73A==", + "license": "MIT", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express-graphql/node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", 
+ "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express-graphql/node_modules/toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, "node_modules/fetch-blob": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", @@ -792,6 +875,15 @@ "node": ">= 0.6" } }, + "node_modules/fs-capacitor": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/fs-capacitor/-/fs-capacitor-6.2.0.tgz", + "integrity": "sha512-nKcE1UduoSKX27NSZlg879LdQc94OtbOsEmKMN2MBNudXREvijRKx2GEBsTMTfws+BrbkJoEuynbGSVRSpauvw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -858,6 +950,83 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/graphql": { + "version": "15.10.1", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-15.10.1.tgz", + "integrity": "sha512-BL/Xd/T9baO6NFzoMpiMD7YUZ62R6viR5tp/MULVEnbYJXZA//kRNW7J0j1w/wXArgL0sCxhDfK5dczSKn3+cg==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/graphql-upload": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/graphql-upload/-/graphql-upload-11.0.0.tgz", + "integrity": "sha512-zsrDtu5gCbQFDWsNa5bMB4nf1LpKX9KDgh+f8oL1288ijV4RxeckhVozAjqjXAfRpxOHD1xOESsh6zq8SjdgjA==", + "license": "MIT", + "dependencies": { + "busboy": "^0.3.1", + "fs-capacitor": "^6.1.0", + "http-errors": "^1.7.3", + "isobject": "^4.0.0", + "object-path": "^0.11.4" + }, + "engines": { + "node": "^10.13.0 || ^12.0.0 || >= 13.7.0" + }, + "funding": { + "url": "https://github.com/sponsors/jaydenseric" + }, + "peerDependencies": { + "graphql": 
"0.13.1 - 15" + } + }, + "node_modules/graphql-upload/node_modules/busboy": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-0.3.1.tgz", + "integrity": "sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==", + "dependencies": { + "dicer": "0.3.0" + }, + "engines": { + "node": ">=4.5.0" + } + }, + "node_modules/graphql-upload/node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/graphql-upload/node_modules/http-errors": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", + "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", + "license": "MIT", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/graphql-upload/node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -1064,6 +1233,15 @@ "node": ">=12" } }, + "node_modules/isobject": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-4.0.0.tgz", + "integrity": "sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, 
"node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -1314,6 +1492,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/object-path": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/object-path/-/object-path-0.11.8.tgz", + "integrity": "sha512-YJjNZrlXJFM42wTBn6zgOJVar9KFJvzx6sTWDte8sWZF//cnjl0BxHNpfZx+ZffXX63A9q0b1zsFiBX4g4X5KA==", + "license": "MIT", + "engines": { + "node": ">= 10.12.0" + } + }, "node_modules/on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", diff --git a/backend/package.json b/backend/package.json index 6f206f3..56e79a3 100644 --- a/backend/package.json +++ b/backend/package.json @@ -11,9 +11,11 @@ "license": "ISC", "dependencies": { "cors": "^2.8.5", - "dotenv": "^16.4.5", + "dotenv": "^16.4.7", "ethers": "^6.13.4", "express": "^4.21.1", + "express-graphql": "^0.12.0", + "graphql-upload": "^11.0.0", "multer": "^1.4.5-lts.1", "nodemon": "^3.1.7", "pako": "^2.1.0", diff --git a/backend/src/controllers/ipfsController.js b/backend/src/controllers/ipfsController.js index 7f482f1..138db68 100644 --- a/backend/src/controllers/ipfsController.js +++ b/backend/src/controllers/ipfsController.js @@ -1,37 +1,77 @@ import { - testIpfs, - uploadMetadataToIPFS, - uploadToIPFS -} from '../services/pinataService.js' - -export const uploadNftToIpfs = async (req, res) => { - try { - if (!req.file) { - res.status(400).json({ error: 'No file uploaded' }) + testIpfs, + uploadMetadataToIPFS, + uploadToIPFS + } from '../services/pinataService.js'; + + export const uploadNftToIpfs = async ({ createReadStream, filename }) => { + try { + // Test IPFS connectivity + await testIpfs(); + + // Create a buffer from the uploaded file stream + const fileBuffer = await streamToBuffer(createReadStream()); + + // Upload the file to IPFS + const ipfsHash = await uploadToIPFS(filename, fileBuffer); + + 
return { + ipfsHash, + success: true, + message: 'File successfully uploaded to IPFS', + }; + } catch (err) { + console.error(err); + return { + ipfsHash: null, + success: false, + message: `Error uploading file to IPFS: ${err.message}`, + }; } - const testing = await testIpfs() - const fileName = req.file.originalname - const fileBuffer = req.file.buffer - const ipfsHash = await uploadToIPFS(fileName, fileBuffer) - res.status(200).json({ ipfsHash }) - } catch (err) { - console.error(err) - res.status(500).json({ error: err }) - } -} -export const uploadMetadataToIpfs = async (req, res) => { - try { - const { name, description, image } = JSON.parse(req.body.metadata) - if (!name || !description || !image) { - res.status(400).json({ error: 'Missing metadata' }) - return + }; + + export const uploadMetadataToIpfs = async ({ metadata }) => { + try { + const { name, description, image } = metadata; + + // Validate metadata fields + if (!name || !description || !image) { + return { + ipfsHash: null, + success: false, + message: 'Missing metadata fields: name, description, or image', + }; + } + + const data = { name, description, image }; + + // Test IPFS connectivity + await testIpfs(); + + // Upload the metadata to IPFS + const ipfsHash = await uploadMetadataToIPFS(data); + + return { + ipfsHash, + success: true, + message: 'Metadata successfully uploaded to IPFS', + }; + } catch (err) { + console.error(err); + return { + ipfsHash: null, + success: false, + message: `Error uploading metadata to IPFS: ${err.message}`, + }; } - const data = { name, description, image } - await testIpfs() - const ipfsHash = await uploadMetadataToIPFS(data) - res.status(200).json({ ipfsHash }) - } catch (err) { - console.error(err) - res.status(500).json({ error: err }) - } -} + }; + + // Helper function to convert stream to buffer + const streamToBuffer = async (stream) => { + const chunks = []; + for await (const chunk of stream) { + chunks.push(chunk); + } + return 
Buffer.concat(chunks); + }; + \ No newline at end of file diff --git a/backend/src/controllers/nftController.js b/backend/src/controllers/nftController.js index b86cef4..2ca9ef0 100644 --- a/backend/src/controllers/nftController.js +++ b/backend/src/controllers/nftController.js @@ -1,59 +1,68 @@ -import { ethers } from 'ethers' -import { getImage, getMetaData } from '../services/pinataService.js' -import { getMarketplaceContract, getNFTContract } from '../utils/getContract.js' -import pako from 'pako' -import { arrayBufferToBase64 } from '../utils/toBase64.js' +import { ethers } from 'ethers'; +import { getImage, getMetaData } from '../services/pinataService.js'; +import { getMarketplaceContract, getNFTContract } from '../utils/getContract.js'; +import pako from 'pako'; +import { arrayBufferToBase64 } from '../utils/toBase64.js'; -export const getNfts = async (req, res) => { - const marketplaceContract = getMarketplaceContract() - const items = await marketplaceContract.fetchMarketItems() - // Fetch token URIs for each item - const itemsWithMetadata = await Promise.all( - items.map(async (item) => { - const nftContract = getNFTContract() - try { - const tokenURI = await nftContract.tokenURI(item.tokenId) - //dbg - // console.log(`Token URI for item ${item.tokenId}:`, tokenURI) - const metadata = await getMetaData(tokenURI.toString()) - // const metadata = await getMetaData(tokenURI) - // console.log(metadata, 'metadata') - const imageData = await getImage((metadata.image).toString()) - // Convert Blob to ArrayBuffer - const arrayBuffer = await imageData.image.arrayBuffer() +export const getNfts = async () => { + try { + const marketplaceContract = getMarketplaceContract(); + const items = await marketplaceContract.fetchMarketItems(); - // Compress the ArrayBuffer using pako - const compressed = pako.deflate(arrayBuffer) + // Fetch token URIs and metadata for each item + const itemsWithMetadata = await Promise.all( + items.map(async (item) => { + const nftContract = 
getNFTContract(); + try { + const tokenURI = await nftContract.tokenURI(item.tokenId); - // Convert the compressed data to Base64 - const base64Data = arrayBufferToBase64(compressed) - // console.log(image, 'image') - const data = { - ...item, - metadata, - img: { data: base64Data, contentType: imageData.contentType } + // Fetch metadata from IPFS + const metadata = await getMetaData(tokenURI.toString()); + + // Fetch and process image data + const imageData = await getImage(metadata.image.toString()); + const arrayBuffer = await imageData.image.arrayBuffer(); + const compressed = pako.deflate(arrayBuffer); + const base64Data = arrayBufferToBase64(compressed); + + return { + ...item, + metadata, + img: { data: base64Data, contentType: imageData.contentType }, + }; + } catch (error) { + console.error( + `Error fetching tokenURI or metadata for item ${item.tokenId}:`, + error + ); + return { ...item, metadata: null }; } - // console.log(data) - return data - } catch (error) { - console.error( - `Error fetching tokenURI for item ${item.tokenId}:`, - error - ) - return { ...item, metadata: null } - } - }) - ) - const formattedItems = itemsWithMetadata.map((item) => ({ - itemId: item[0].toString(), - nftContract: item[1], - tokenId: item[2].toString(), - seller: item[3], - owner: item[4], - price: ethers.formatEther(item[5]), - sold: item[6], - metadata: item.metadata, - image: item.img - })) - res.status(200).json({ data: formattedItems }) -} + }) + ); + + // Format the items with metadata for the GraphQL response + const formattedItems = itemsWithMetadata.map((item) => ({ + itemId: item[0].toString(), + nftContract: item[1], + tokenId: item[2].toString(), + seller: item[3], + owner: item[4], + price: ethers.formatEther(item[5]), + sold: item[6], + metadata: item.metadata, + image: item.img, + })); + + return { + success: true, + data: formattedItems, + }; + } catch (error) { + console.error('Error fetching NFTs:', error); + return { + success: false, + message: `Error 
fetching NFTs: ${error.message}`, + data: [], + }; + } +}; diff --git a/backend/src/graphql/ipfsResolvers.js b/backend/src/graphql/ipfsResolvers.js new file mode 100644 index 0000000..bad8358 --- /dev/null +++ b/backend/src/graphql/ipfsResolvers.js @@ -0,0 +1,66 @@ +import { testIpfs, uploadToIPFS, uploadMetadataToIPFS } from '../services/pinataService.js'; +
+export const ipfsResolvers = { + Mutation: { + /** + * Resolver for uploading an image to IPFS + * @param {Object} _ - Parent object (unused here) + * @param {Object} file - File input from the mutation + * @returns {Object} - IPFS response with the uploaded file hash + */ + uploadImage: async (_, { file }) => { + try { + // Destructure file properties from the upload + const { createReadStream, filename } = await file; + const fileStream = createReadStream(); + + // Read the file stream into a buffer + const chunks = []; + for await (let chunk of fileStream) { + chunks.push(chunk); + } + const fileBuffer = Buffer.concat(chunks); + + // Test IPFS connection and upload the file + await testIpfs(); + const ipfsHash = await uploadToIPFS(filename, fileBuffer); + + // Return the IPFS hash as the response + return { ipfsHash }; + } catch (err) { + console.error('Error uploading image to IPFS:', err); + throw new Error('Failed to upload image to IPFS'); + } + }, + + /** + * Resolver for uploading metadata to IPFS + * @param {Object} _ - Parent object (unused here) + * @param {Object} metadata - Metadata input from the mutation + * @returns {Object} - IPFS response with the uploaded metadata hash + */ + uploadMetadata: async (_, { metadata }) => { + try { + // Destructure metadata fields + const { name, description, image } = metadata; + + // Validate required metadata fields + if (!name || !description || !image) { + throw new Error('Missing metadata fields: name, description, or image'); + } + + const data = { name, description, image }; + + // Test IPFS connection and upload metadata + await testIpfs(); + const 
ipfsHash = await uploadMetadataToIPFS(data); + + // Return the IPFS hash as the response + return { ipfsHash }; + } catch (err) { + console.error('Error uploading metadata to IPFS:', err); + throw new Error('Failed to upload metadata to IPFS'); + } + }, + }, +}; diff --git a/backend/src/graphql/nftResolvers.js b/backend/src/graphql/nftResolvers.js new file mode 100644 index 0000000..ca69c29 --- /dev/null +++ b/backend/src/graphql/nftResolvers.js @@ -0,0 +1,72 @@ +import { ethers } from 'ethers'; +import { getImage, getMetaData } from '../services/pinataService.js'; +import { getMarketplaceContract, getNFTContract } from '../utils/getContract.js'; +import pako from 'pako'; +import { arrayBufferToBase64 } from '../utils/toBase64.js'; +
+export const nftResolvers = { + Query: { + /** + * Resolver for fetching NFT metadata and images from the marketplace + * @returns {Array} - List of NFTs with metadata and image data + */ + getNFTs: async () => { + try { + // Get the marketplace contract + const marketplaceContract = getMarketplaceContract(); + + // Fetch all market items + const items = await marketplaceContract.fetchMarketItems(); + + // Process each item to fetch its metadata and image + const itemsWithMetadata = await Promise.all( + items.map(async (item) => { + try { + const nftContract = getNFTContract(); + const tokenURI = await nftContract.tokenURI(item.tokenId); + + // Fetch metadata and image from IPFS + const metadata = await getMetaData(tokenURI.toString()); + const imageData = await getImage(metadata.image.toString()); + + // Convert Blob to ArrayBuffer + const arrayBuffer = await imageData.image.arrayBuffer(); + + // Compress the ArrayBuffer using pako + const compressed = pako.deflate(arrayBuffer); + + // Convert the compressed data to Base64 + const base64Data = arrayBufferToBase64(compressed); + + // Return the complete item data + return { + ...item, + metadata, + img: { data: base64Data, contentType: imageData.contentType }, + }; + } catch 
(error) { + console.error(`Error processing item ${item.tokenId}:`, error); + return { ...item, metadata: null, img: null }; + } + }) + ); + + // Format the items with metadata and images for the response + return itemsWithMetadata.map((item) => ({ + itemId: item[0].toString(), + nftContract: item[1], + tokenId: item[2].toString(), + seller: item[3], + owner: item[4], + price: ethers.formatEther(item[5]), + sold: item[6], + metadata: item.metadata, + image: item.img, + })); + } catch (error) { + console.error('Error fetching NFTs:', error); + throw new Error('Failed to fetch NFTs'); + } + }, + }, +}; diff --git a/backend/src/graphql/schema.js b/backend/src/graphql/schema.js new file mode 100644 index 0000000..6636821 --- /dev/null +++ b/backend/src/graphql/schema.js @@ -0,0 +1,78 @@ +import { makeExecutableSchema } from '@graphql-tools/schema'; +import { GraphQLUpload } from 'graphql-upload'; +import { uploadNftToIpfs, uploadMetadataToIpfs } from '../controllers/ipfsController.js'; +
+// Define the GraphQL schema +const typeDefs = ` + scalar Upload + + type UploadResponse { + ipfsHash: String + success: Boolean + message: String + } + + type Query { + # Dummy query to check API status + _: String + } + + type Mutation { + uploadImage(file: Upload!): UploadResponse + uploadMetaData(file: Upload!): UploadResponse + } +`; + +// Define the resolvers +const resolvers = { + Upload: GraphQLUpload, + + Query: { + _: () => "GraphQL API is running", + }, + + Mutation: { + uploadImage: async (_, { file }) => { + try { + const { createReadStream, filename } = await file; + const result = await uploadNftToIpfs({ createReadStream, filename }); + return { + ipfsHash: result.ipfsHash, + success: result.success, + message: result.message, + }; + } catch (error) { + return { + ipfsHash: null, + success: false, + message: `Failed to upload file: ${error.message}`, + }; + } + }, + uploadMetaData: async (_, { file }) => { + try { + const { createReadStream, filename } = await file; + 
const result = await uploadMetadataToIpfs({ createReadStream, filename }); // FIXME: uploadMetadataToIpfs expects { metadata: { name, description, image } }, not a file stream — define a MetadataInput type and pass parsed metadata + return { + ipfsHash: result.ipfsHash, + success: result.success, + message: result.message, + }; + } catch (error) { + return { + ipfsHash: null, + success: false, + message: `Failed to upload metadata: ${error.message}`, + }; + } + }, + }, +}; + +// Create and export the executable schema +const schema = makeExecutableSchema({ + typeDefs, + resolvers, +}); + +export { schema }; diff --git a/backend/src/routes/ipfs.js b/backend/src/routes/ipfs.js deleted file mode 100644 index 3950d0f..0000000 --- a/backend/src/routes/ipfs.js +++ /dev/null @@ -1,18 +0,0 @@ -import express from 'express' -import path from 'path' -import fs from 'fs' -import multer from 'multer' -import { - uploadNftToIpfs, - uploadMetadataToIpfs -} from '../controllers/ipfsController.js' - -const router = express.Router() - -const storage = multer.memoryStorage() - -const upload = multer({ storage: storage }) -// route for issuer to upload document and then uploading it to pinata -router.post('/uploadImage', upload.single('file'), uploadNftToIpfs) -router.post('/uploadMetaData', upload.single('file'), uploadMetadataToIpfs) -export default router diff --git a/backend/src/routes/nfts.js b/backend/src/routes/nfts.js deleted file mode 100644 index 97df3b8..0000000 --- a/backend/src/routes/nfts.js +++ /dev/null @@ -1,8 +0,0 @@ -import express from 'express' -import { getNfts } from '../controllers/nftController.js' - -const router = express.Router() - -// route for issuer to upload document and then uploading it to pinata -router.get('/getNfts', getNfts) -export default router