commit a41f2b8a4b

@@ -16,4 +16,8 @@ contracts/out
subgraph/abis
subgraph/build
subgraph/generated
subgraph/examples/query/.graphclient

serverless/dist
serverless/.serverless
serverless/.esbuild
16
.eslintrc.js

@@ -12,8 +12,22 @@ module.exports = {
'./ui/tsconfig.json',
'./subgraph/tsconfig.json',
'./subgraph/tsconfig.tools.json',
'./serverless/tsconfig.json',
],
},
plugins: ['@typescript-eslint', 'prettier'],
plugins: ['@typescript-eslint', 'prettier', 'filenames-simple'],
root: true,
overrides: [
{
files: ['*.ts'],
rules: {
'filenames-simple/naming-convention': [
'error',
{
rule: 'kebab-case',
},
],
},
},
],
};
@@ -11,6 +11,8 @@ on:
jobs:
test-contracts:
runs-on: ubuntu-latest
env:
MAINNET_API_KEY: ${{ secrets.MAINNET_API_KEY }}

defaults:
run:
@@ -0,0 +1,52 @@
name: Deploy site via Fleek

on:
push:
branches:
- develop
paths:
- 'ui/**'

jobs:
deploy-to-fleek:
runs-on: ubuntu-latest

env:
FLEEK_TOKEN: ${{ secrets.FLEEK_TOKEN }}
FLEEK_PROJECT_ID: ${{ secrets.FLEEK_PROJECT_ID }}
VITE_ALCHEMY_API_KEY: ${{ secrets.VITE_ALCHEMY_API_KEY }}
VITE_ALCHEMY_APP_NAME: ${{ secrets.VITE_ALCHEMY_APP_NAME }}
VITE_FIREBASE_API_KEY: ${{ secrets.VITE_FIREBASE_API_KEY }}
VITE_FIREBASE_AUTH_DOMAIN: ${{ secrets.VITE_FIREBASE_AUTH_DOMAIN }}
VITE_FIREBASE_PROJECT_ID: ${{ secrets.VITE_FIREBASE_PROJECT_ID }}
VITE_FIREBASE_STORAGE_BUCKET: ${{ secrets.VITE_FIREBASE_STORAGE_BUCKET }}
VITE_FIREBASE_MESSAGING_SENDER_ID: ${{ secrets.VITE_FIREBASE_MESSAGING_SENDER_ID }}
VITE_FIREBASE_APP_ID: ${{ secrets.VITE_FIREBASE_APP_ID }}
VITE_FIREBASE_MEASUREMENT_ID: ${{ secrets.VITE_FIREBASE_MEASUREMENT_ID }}
VITE_TWITTER_URL: ${{ secrets.VITE_TWITTER_URL }}
VITE_GOERLI_RPC: ${{ secrets.VITE_GOERLI_RPC }}

defaults:
run:
working-directory: ui

steps:
- name: Checkout
uses: actions/checkout@v3

- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: 16

- name: Install Fleek CLI
run: npm i -g @fleekxyz/cli

- name: Install UI dependencies
run: yarn

- name: Build ui folder
run: yarn build

- name: Build & deploy sites
run: fleek sites deploy
@@ -36,5 +36,3 @@ jobs:
- name: Build Subgraph
run: yarn build

- name: Test Subgraph
run: yarn test
@@ -9,4 +9,8 @@ contracts/out/**/*
subgraph/abis/**/*
subgraph/build/**/*
subgraph/generated/**/*
subgraph/examples/query/.graphclient/**/*

serverless/dist/**/*
serverless/.serverless/**/*
serverless/.esbuild/**/*
@@ -1,5 +1,8 @@
# The RPC API URL (e.g. https://rpc-mumbai.maticvigil.com https://polygon-mumbai.g.alchemy.com/v2/your-api-key)
API_URL=
# The RPC API URL (e.g. https://polygon-mainnet.g.alchemy.com/v2/your-api-key)
ETH_SEPOLIA_API_URL=
POLYGON_API_URL=
ETH_MAIN_API_URL=
ETH_GOERLI_API_URL=

# The exported wallet private key (e.g 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80)
# You can reach info about how to get a private key from Metamask on https://metamask.zendesk.com/hc/en-us/articles/360015289632-How-to-Export-an-Account-Private-Key
@@ -8,5 +11,21 @@ PRIVATE_KEY=
# The blocks explorer API (e.g https://mumbai.polygonscan.com/)
POLYSCAN_API=https://mumbai.polygonscan.com/

# The blocks explorer API (e.g https://sepolia.etherscan.io/)
ETHERSCAN_API=https://api-sepolia.etherscan.io/

# Explorer key
POLYGONSCAN_KEY=
ETHERSCAN_KEY=

# The address of the deployed contract on the blockchain
CONTRACT_ADDRESS=

# Alchemy or Infura API key to fork the mainnet on tests (e.g. https://eth-mainnet.g.alchemy.com/v2/your-api-key)
MAINNET_API_KEY=

# Enable gas report on hardhat tests
REPORT_GAS=true

# The CoinMarketCap API key to get the price of the token for gas report
COINMARKETCAP_KEY=
@@ -2,10 +2,11 @@
cache
artifacts
deployments/hardhat
gas-report

# Foundry
out
forge-cache

# OpenZeppelin
.openzeppelin/unknown-*.json
@@ -0,0 +1,491 @@
{
"manifestVersion": "3.2",
"admin": {
"address": "0x07aA73f07CB86608309a25a41bb02455296ED28B",
"txHash": "0x1aad0fc81017a4a12ce168c08ef50e97ed9ccd713a209b0fee0bebf894c646b3"
},
"proxies": [
{
"address": "0x8795608346Eb475E42e69F1281008AEAa522479D",
"txHash": "0x626662cdb0902646dd70d3ef50abb00c12614d8e572b175f2e45a40a73d4954e",
"kind": "transparent"
}
],
"impls": {
"0d797e2700f6709b90e0002137bd5fafa5a5728f405046d27d3e8bc86468034c": {
"address": "0x03fBB4F0D28f27c33b99F1b80aF679F20cb5E159",
"txHash": "0x29e5822d8e44151228816dcf989fb50c9940c163f8eb884393acf4391daf6462",
"layout": {
"solcVersion": "0.8.12",
"storage": [
{
"label": "_initialized",
"offset": 0,
"slot": "0",
"type": "t_uint8",
"contract": "Initializable",
"src": "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol:62",
"retypedFrom": "bool"
},
{
"label": "_initializing",
"offset": 1,
"slot": "0",
"type": "t_bool",
"contract": "Initializable",
"src": "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol:67"
},
{
"label": "__gap",
"offset": 0,
"slot": "1",
"type": "t_array(t_uint256)50_storage",
"contract": "ContextUpgradeable",
"src": "@openzeppelin/contracts-upgradeable/utils/ContextUpgradeable.sol:36"
},
{
"label": "__gap",
"offset": 0,
"slot": "51",
"type": "t_array(t_uint256)50_storage",
"contract": "ERC165Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol:41"
},
{
"label": "_name",
"offset": 0,
"slot": "101",
"type": "t_string_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:25"
},
{
"label": "_symbol",
"offset": 0,
"slot": "102",
"type": "t_string_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:28"
},
{
"label": "_owners",
"offset": 0,
"slot": "103",
"type": "t_mapping(t_uint256,t_address)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:31"
},
{
"label": "_balances",
"offset": 0,
"slot": "104",
"type": "t_mapping(t_address,t_uint256)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:34"
},
{
"label": "_tokenApprovals",
"offset": 0,
"slot": "105",
"type": "t_mapping(t_uint256,t_address)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:37"
},
{
"label": "_operatorApprovals",
"offset": 0,
"slot": "106",
"type": "t_mapping(t_address,t_mapping(t_address,t_bool))",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:40"
},
{
"label": "__gap",
"offset": 0,
"slot": "107",
"type": "t_array(t_uint256)44_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:514"
},
{
"label": "_collectionRolesCounter",
"offset": 0,
"slot": "151",
"type": "t_mapping(t_enum(CollectionRoles)3958,t_uint256)",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:58"
},
{
"label": "_collectionRoles",
"offset": 0,
"slot": "152",
"type": "t_mapping(t_enum(CollectionRoles)3958,t_mapping(t_address,t_bool))",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:63"
},
{
"label": "_tokenRolesVersion",
"offset": 0,
"slot": "153",
"type": "t_mapping(t_uint256,t_uint256)",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:70"
},
{
"label": "_tokenRoles",
"offset": 0,
"slot": "154",
"type": "t_mapping(t_uint256,t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)3960,t_mapping(t_address,t_bool))))",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:75"
},
{
"label": "__gap",
"offset": 0,
"slot": "155",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:178"
},
{
"label": "_paused",
"offset": 0,
"slot": "204",
"type": "t_bool",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:23"
},
{
"label": "_canPause",
"offset": 1,
"slot": "204",
"type": "t_bool",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:24"
},
{
"label": "__gap",
"offset": 0,
"slot": "205",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:133"
},
{
"label": "_billings",
"offset": 0,
"slot": "254",
"type": "t_mapping(t_enum(Billing)4925,t_uint256)",
"contract": "FleekBilling",
"src": "contracts/FleekBilling.sol:31"
},
{
"label": "__gap",
"offset": 0,
"slot": "255",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekBilling",
"src": "contracts/FleekBilling.sol:81"
},
{
"label": "_accessPoints",
"offset": 0,
"slot": "304",
"type": "t_mapping(t_string_memory_ptr,t_struct(AccessPoint)4457_storage)",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:64"
},
{
"label": "_autoApproval",
"offset": 0,
"slot": "305",
"type": "t_mapping(t_uint256,t_bool)",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:66"
},
{
"label": "__gap",
"offset": 0,
"slot": "306",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:211"
},
{
"label": "_appIds",
"offset": 0,
"slot": "355",
"type": "t_uint256",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:51"
},
{
"label": "_apps",
"offset": 0,
"slot": "356",
"type": "t_mapping(t_uint256,t_struct(Token)6538_storage)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:52"
},
{
"label": "_tokenVerifier",
"offset": 0,
"slot": "357",
"type": "t_mapping(t_uint256,t_address)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:53"
}
],
"types": {
"t_address": {
"label": "address",
"numberOfBytes": "20"
},
"t_array(t_uint256)44_storage": {
"label": "uint256[44]",
"numberOfBytes": "1408"
},
"t_array(t_uint256)49_storage": {
"label": "uint256[49]",
"numberOfBytes": "1568"
},
"t_array(t_uint256)50_storage": {
"label": "uint256[50]",
"numberOfBytes": "1600"
},
"t_bool": {
"label": "bool",
"numberOfBytes": "1"
},
"t_enum(AccessPointCreationStatus)4443": {
"label": "enum FleekAccessPoints.AccessPointCreationStatus",
"members": [
"DRAFT",
"APPROVED",
"REJECTED",
"REMOVED"
],
"numberOfBytes": "1"
},
"t_enum(Billing)4925": {
"label": "enum FleekBilling.Billing",
"members": [
"Mint",
"AddAccessPoint"
],
"numberOfBytes": "1"
},
"t_enum(CollectionRoles)3958": {
"label": "enum FleekAccessControl.CollectionRoles",
"members": [
"Owner",
"Verifier"
],
"numberOfBytes": "1"
},
"t_enum(TokenRoles)3960": {
"label": "enum FleekAccessControl.TokenRoles",
"members": [
"Controller"
],
"numberOfBytes": "1"
},
"t_mapping(t_address,t_bool)": {
"label": "mapping(address => bool)",
"numberOfBytes": "32"
},
"t_mapping(t_address,t_mapping(t_address,t_bool))": {
"label": "mapping(address => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_address,t_uint256)": {
"label": "mapping(address => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(Billing)4925,t_uint256)": {
"label": "mapping(enum FleekBilling.Billing => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(CollectionRoles)3958,t_mapping(t_address,t_bool))": {
"label": "mapping(enum FleekAccessControl.CollectionRoles => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_enum(CollectionRoles)3958,t_uint256)": {
"label": "mapping(enum FleekAccessControl.CollectionRoles => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(TokenRoles)3960,t_mapping(t_address,t_bool))": {
"label": "mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_string_memory_ptr,t_struct(AccessPoint)4457_storage)": {
"label": "mapping(string => struct FleekAccessPoints.AccessPoint)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_address)": {
"label": "mapping(uint256 => address)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_bool)": {
"label": "mapping(uint256 => bool)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)3960,t_mapping(t_address,t_bool)))": {
"label": "mapping(uint256 => mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool)))",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)3960,t_mapping(t_address,t_bool))))": {
"label": "mapping(uint256 => mapping(uint256 => mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool))))",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_struct(Build)6518_storage)": {
"label": "mapping(uint256 => struct IERCX.Build)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_struct(Token)6538_storage)": {
"label": "mapping(uint256 => struct IERCX.Token)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_uint256)": {
"label": "mapping(uint256 => uint256)",
"numberOfBytes": "32"
},
"t_string_memory_ptr": {
"label": "string",
"numberOfBytes": "32"
},
"t_string_storage": {
"label": "string",
"numberOfBytes": "32"
},
"t_struct(AccessPoint)4457_storage": {
"label": "struct FleekAccessPoints.AccessPoint",
"members": [
{
"label": "tokenId",
"type": "t_uint256",
"offset": 0,
"slot": "0"
},
{
"label": "score",
"type": "t_uint256",
"offset": 0,
"slot": "1"
},
{
"label": "contentVerified",
"type": "t_bool",
"offset": 0,
"slot": "2"
},
{
"label": "nameVerified",
"type": "t_bool",
"offset": 1,
"slot": "2"
},
{
"label": "owner",
"type": "t_address",
"offset": 2,
"slot": "2"
},
{
"label": "status",
"type": "t_enum(AccessPointCreationStatus)4443",
"offset": 22,
"slot": "2"
}
],
"numberOfBytes": "96"
},
"t_struct(Build)6518_storage": {
"label": "struct IERCX.Build",
"members": [
{
"label": "commitHash",
"type": "t_string_storage",
"offset": 0,
"slot": "0"
},
{
"label": "gitRepository",
"type": "t_string_storage",
"offset": 0,
"slot": "1"
}
],
"numberOfBytes": "64"
},
"t_struct(Token)6538_storage": {
"label": "struct IERCX.Token",
"members": [
{
"label": "name",
"type": "t_string_storage",
"offset": 0,
"slot": "0"
},
{
"label": "description",
"type": "t_string_storage",
"offset": 0,
"slot": "1"
},
{
"label": "externalURL",
"type": "t_string_storage",
"offset": 0,
"slot": "2"
},
{
"label": "ENS",
"type": "t_string_storage",
"offset": 0,
"slot": "3"
},
{
"label": "logo",
"type": "t_string_storage",
"offset": 0,
"slot": "4"
},
{
"label": "color",
"type": "t_uint24",
"offset": 0,
"slot": "5"
},
{
"label": "currentBuild",
"type": "t_uint256",
"offset": 0,
"slot": "6"
},
{
"label": "builds",
"type": "t_mapping(t_uint256,t_struct(Build)6518_storage)",
"offset": 0,
"slot": "7"
}
],
"numberOfBytes": "256"
},
"t_uint24": {
"label": "uint24",
"numberOfBytes": "3"
},
"t_uint256": {
"label": "uint256",
"numberOfBytes": "32"
},
"t_uint8": {
"label": "uint8",
"numberOfBytes": "1"
}
}
}
}
}
}
@@ -140,13 +140,17 @@ $ yarn deploy:hardhat

If the execution is successful, you will see the contract address on your screen.

### **Polygon Mumbai Testnet**
### **Testnet deployments**

To deploy the contract on the testnet, you have to first export your wallet's private key and update the `.env.example` file at the root directory of this repository.

The [.env.example](./.env.example) file needs to be renamed to `.env` before continuing. Make sure you are using your private API URL, if you have one.

After updating the `.env` file, you can run:
After updating the `.env` file, you can deploy the contract by following the guides below.

#### **Polygon Mumbai Testnet**

Run:

```
$ yarn deploy:mumbai
@@ -154,6 +158,26 @@ $ yarn deploy:mumbai

to deploy the contract on the testnet. Please note that your wallet needs to hold enough Mumbai MATIC for the deployment to be successful. For more in-depth information about how to deploy the contract, check out [this guide](https://wiki.polygon.technology/docs/develop/alchemy).

#### **Ethereum Sepolia Testnet**

Run:

```
$ yarn deploy:sepolia
```

to deploy the contract on the testnet. Please note that your wallet needs to hold enough Sepolia ETH for the deployment to be successful. For more in-depth information about how to deploy the contract, check out [this guide](https://docs.alchemy.com/docs/how-to-deploy-a-smart-contract-to-the-sepolia-testnet).

#### **Ethereum Goerli Testnet**

Run:

```
$ yarn deploy:goerli
```

to deploy the contract on the testnet. Please note that your wallet needs to hold enough Goerli ETH for the deployment to be successful.

### **Deploy arguments**

For any of the deploy scripts above you are able to input arguments to change the data sent during the deployment. They are:
@@ -189,7 +213,7 @@ to deploy the contract on the testnet. Please note that your wallet needs to hol

## ▶️ Interaction scripts

Right away, in the [scripts](./scripts/) folder you are able to see some scripts that will help you to interact with deployed contracts. By default you are able to select the `localhost`, `hardhat` or `mumbai` network name predefined on [hardhat.config.ts](./hardhat.config.ts). The scripts will be using the deployment information stored in the [deployments](./deployments/) folder. You should have a nested folder for each of the networks you have deployed to. The scripts need to be run using the Hardhat environment, following the pattern:
Right away, in the [scripts](./scripts/) folder you are able to see some scripts that will help you to interact with deployed contracts. By default you are able to select the `localhost`, `hardhat`, `mumbai`, `sepolia` or `goerli` network name predefined on [hardhat.config.ts](./hardhat.config.ts). The scripts will be using the deployment information stored in the [deployments](./deployments/) folder. You should have a nested folder for each of the networks you have deployed to. The scripts need to be run using the Hardhat environment, following the pattern:

```bash
# Replace <script_name> with the selected script
@@ -28,7 +28,7 @@ abstract contract FleekBilling is Initializable {
/**
* @dev Mapping of billing values.
*/
mapping(Billing => uint256) public _billings;
mapping(Billing => uint256) private _billings;

/**
* @dev Initializes the contract by setting default billing values.
@@ -9,6 +9,7 @@ import "./FleekAccessControl.sol";
import "./FleekBilling.sol";
import "./FleekPausable.sol";
import "./FleekAccessPoints.sol";
import "./util/FleekENS.sol";
import "./util/FleekStrings.sol";
import "./IERCX.sol";

@@ -51,6 +52,19 @@ contract FleekERC721 is
uint256 private _appIds;
mapping(uint256 => Token) private _apps;
mapping(uint256 => address) private _tokenVerifier;
mapping(uint256 => bool) private _tokenVerified;

/**
* @dev This constructor sets the state of the implementation contract to paused
* and disables initializers, not allowing interactions with the implementation
* contracts.
*/
/// @custom:oz-upgrades-unsafe-allow constructor
constructor() {
_setPausable(true);
_pause();
_disableInitializers();
}

/**
* @dev Initializes the contract by setting a `name` and a `symbol` to the token collection.
@@ -91,7 +105,7 @@ contract FleekERC721 is
string memory name,
string memory description,
string memory externalURL,
string memory ENS,
string calldata ens,
string memory commitHash,
string memory gitRepository,
string memory logo,
@@ -99,6 +113,7 @@ contract FleekERC721 is
bool accessPointAutoApproval,
address verifier
) public payable requirePayment(Billing.Mint) returns (uint256) {
FleekENS.requireENSOwner(ens);
uint256 tokenId = _appIds;
_mint(to, tokenId);

@@ -108,7 +123,7 @@ contract FleekERC721 is
app.name = name;
app.description = description;
app.externalURL = externalURL;
app.ENS = ENS;
app.ENS = ens;
app.logo = logo;
app.color = color;

@@ -121,7 +136,7 @@ contract FleekERC721 is
name,
description,
externalURL,
ENS,
ens,
commitHash,
gitRepository,
logo,
@@ -133,6 +148,7 @@ contract FleekERC721 is
);

_tokenVerifier[tokenId] = verifier;
_tokenVerified[tokenId] = false;
_setAccessPointAutoApproval(tokenId, accessPointAutoApproval);

return tokenId;
@@ -152,9 +168,10 @@ contract FleekERC721 is
_requireMinted(tokenId);
address owner = ownerOf(tokenId);
bool accessPointAutoApproval = _getAccessPointAutoApproval(tokenId);
bool verified = _tokenVerified[tokenId];
Token storage app = _apps[tokenId];

return string(abi.encodePacked(_baseURI(), app.toString(owner, accessPointAutoApproval).toBase64()));
return string(abi.encodePacked(_baseURI(), app.toString(owner, accessPointAutoApproval, verified).toBase64()));
}

/**
@@ -259,8 +276,9 @@ contract FleekERC721 is
*/
function setTokenENS(
uint256 tokenId,
string memory _tokenENS
string calldata _tokenENS
) public virtual requireTokenRole(tokenId, TokenRoles.Controller) {
FleekENS.requireENSOwner(_tokenENS);
_requireMinted(tokenId);
_apps[tokenId].ENS = _tokenENS;
emit MetadataUpdate(tokenId, "ENS", _tokenENS, msg.sender);
@@ -437,6 +455,40 @@ contract FleekERC721 is
return _tokenVerifier[tokenId];
}

/**
* @dev Sets the verification status of a token.
*
* May emit a {MetadataUpdate} event.
*
* Requirements:
*
* - the tokenId must be minted and valid.
* - the sender must be the token verifier.
* - the sender must have `CollectionRoles.Verifier` role.
*
*/
function setTokenVerified(
uint256 tokenId,
bool verified
) public requireCollectionRole(CollectionRoles.Verifier) requireTokenVerifier(tokenId) {
_requireMinted(tokenId);
_tokenVerified[tokenId] = verified;
emit MetadataUpdate(tokenId, "verified", verified, msg.sender);
}

/**
* @dev Returns the verification status of a token.
*
* Requirements:
*
* - the tokenId must be minted and valid.
*
*/
function isTokenVerified(uint256 tokenId) public view returns (bool) {
_requireMinted(tokenId);
return _tokenVerified[tokenId];
}

/*//////////////////////////////////////////////////////////////
ACCESS POINTS
//////////////////////////////////////////////////////////////*/
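For readers following the new verification flow, here is a minimal Hardhat/ethers sketch of how the pieces above fit together. The `contract`, `tokenId` and `verifier` bindings are illustrative assumptions; the enum index for `CollectionRoles.Verifier` (1) comes from the storage-layout manifest earlier in this diff, and the sketch assumes `verifier` is the address that was passed as the token's verifier at mint time.

```typescript
// Hedged sketch: assumes `contract` is a deployed FleekERC721 connected to a
// collection-owner signer, and `tokenId` is an already minted token.
import { ethers } from 'hardhat';

async function verifyToken(contract: any, tokenId: number) {
  // Assumption: the second signer is the token's mint-time verifier.
  const [, verifier] = await ethers.getSigners();

  // CollectionRoles.Verifier is index 1 (enum members: ["Owner", "Verifier"]).
  await contract.grantCollectionRole(1, verifier.address);

  // Only the token's own verifier holding the Verifier role may flip the flag.
  await contract.connect(verifier).setTokenVerified(tokenId, true);

  console.log(await contract.isTokenVerified(tokenId)); // true
}
```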
@@ -0,0 +1,32 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.8.7;

import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol";
import {ENS} from "@ensdomains/ens-contracts/contracts/registry/ENS.sol";
import {Resolver} from "@ensdomains/ens-contracts/contracts/resolvers/Resolver.sol";

error MustBeENSOwner();

library FleekENS {
ENS internal constant _ens = ENS(0x00000000000C2E074eC69A0dFb2997BA6C7d2e1e);

/**
* @dev Reverts if the sender is not the owner of the ENS node.
*/
function requireENSOwner(string calldata name) internal view {
if (_ens.owner(namehash(bytes(name), 0)) != msg.sender) revert MustBeENSOwner();
}

/**
* @dev Processes the name and returns the ENS node hash.
*/
function namehash(bytes calldata name, uint256 index) internal view returns (bytes32) {
for (uint256 i = index; i < name.length; i++) {
if (name[i] == ".") {
return keccak256(abi.encodePacked(namehash(name, i + 1), keccak256(name[index:i])));
}
}
return keccak256(abi.encodePacked(bytes32(0x0), keccak256(name[index:name.length])));
}
}
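As a sanity check on the recursive `namehash` above, the same node can be computed off-chain; a small sketch, assuming ethers v5 (which the repository's scripts already use), is:

```typescript
// ethers.utils.namehash implements the EIP-137 namehash; for a well-formed name it
// should return the same bytes32 node that FleekENS.namehash(bytes(name), 0) computes.
import { ethers } from 'ethers';

const node = ethers.utils.namehash('app.eth');
console.log(node); // 32-byte node hash used as the key into the ENS registry
```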
@@ -36,7 +36,8 @@ library FleekStrings {
function toString(
IERCX.Token storage app,
address owner,
bool accessPointAutoApproval
bool accessPointAutoApproval,
bool verified
) internal view returns (string memory) {
// prettier-ignore
return string(abi.encodePacked(
@@ -47,6 +48,7 @@ library FleekStrings {
'"external_url":"', app.externalURL, '",',
'"image":"', FleekSVG.generateBase64(app.name, app.ENS, app.logo, app.color.toColorString()), '",',
'"access_point_auto_approval":', accessPointAutoApproval.toString(),',',
'"verified":',verified.toString(),',',
'"attributes": [',
'{"trait_type": "ENS", "value":"', app.ENS,'"},',
'{"trait_type": "Commit Hash", "value":"', app.builds[app.currentBuild].commitHash,'"},',

File diff suppressed because one or more lines are too long
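Since the metadata JSON now carries a `verified` field, a quick way to inspect it off-chain is to base64-decode the `tokenURI` output, mirroring the `parseTokenURI` test helper further down in this diff (the `contract` and `tokenId` bindings are assumed for illustration):

```typescript
// Sketch: decode the data:application/json;base64 URI returned by tokenURI().
async function readMetadata(contract: any, tokenId: number) {
  const uri: string = await contract.tokenURI(tokenId);
  const metadata = JSON.parse(
    Buffer.from(uri.replace('data:application/json;base64,', ''), 'base64').toString()
  );
  console.log(metadata.verified);                   // false right after mint
  console.log(metadata.access_point_auto_approval); // mint-time auto-approval setting
  return metadata;
}
```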
@@ -0,0 +1,8 @@
{
"FleekERC721": [
{
"address": "0x8795608346Eb475E42e69F1281008AEAa522479D",
"timestamp": "3/17/2023, 3:01:30 PM"
}
]
}

File diff suppressed because one or more lines are too long
@@ -4,3 +4,8 @@ out = 'out'
libs = ['node_modules', 'lib']
test = 'test/foundry'
cache_path = 'forge-cache'
via_ir = true

[rpc_endpoints]
mainnet = "${MAINNET_API_KEY}"

@@ -14,10 +14,15 @@ import deploy from './scripts/deploy';
dotenv.config();

const {
API_URL = 'https://polygon-mainnet.alchemyapi.io/v2/your-api-key',
PRIVATE_KEY,
REPORT_GAS,
POLYGONSCAN_KEY,
ETHERSCAN_API_KEY,
POLYGON_API_URL,
ETH_MAIN_API_URL,
ETH_SEPOLIA_API_URL,
ETH_GOERLI_API_URL,
MAINNET_API_KEY,
COINMARKETCAP_KEY,
} = process.env;

const config: HardhatUserConfig = {
@@ -25,19 +30,40 @@ const config: HardhatUserConfig = {
networks: {
hardhat: {
chainId: 31337,
forking: MAINNET_API_KEY
? {
url: MAINNET_API_KEY,
blockNumber: 16876149,
}
: undefined,
},
mumbai: {
url: API_URL,
url: POLYGON_API_URL ? POLYGON_API_URL : '',
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
chainId: 80001,
},
goerli: {
url: ETH_GOERLI_API_URL ? ETH_GOERLI_API_URL : '',
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
chainId: 5,
},
sepolia: {
url: ETH_SEPOLIA_API_URL ? ETH_SEPOLIA_API_URL : '',
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
chainId: 11155111,
},
mainnet: {
url: ETH_MAIN_API_URL ? ETH_MAIN_API_URL : '',
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
chainId: 1,
},
},
gasReporter: {
enabled: REPORT_GAS === 'true' || false,
currency: 'USD',
outputFile: 'gas-report.txt',
outputFile: 'gas-report',
noColors: true,
// coinmarketcap: process.env.COINMARKETCAP_API_KEY,
coinmarketcap: COINMARKETCAP_KEY,
},
contractSizer: {
runOnCompile: false,
@@ -60,15 +86,17 @@ const config: HardhatUserConfig = {
timeout: 200000, // 200 seconds max for running tests
},
etherscan: {
apiKey: {
polygonMumbai: POLYGONSCAN_KEY,
},
// apiKey: {
//   polygonMumbai: POLYGONSCAN_KEY,
// },
apiKey: ETHERSCAN_API_KEY ? ETHERSCAN_API_KEY : '',
},
};

export default config;

// npx hardhat deploy --network mumbai --new-proxy-instance --name "FleekNFAs" --symbol "FLKNFA" --billing "[10000, 20000]"
// Use the following command to deploy where the network flag can be replaced with the network you choose:
// npx hardhat deploy --network goerli --new-proxy-instance --name "FleekNFAs" --symbol "FLKNFA" --billing "[10000, 20000]"
task('deploy', 'Deploy the contracts')
.addFlag('newProxyInstance', 'Force to deploy a new proxy instance')
.addOptionalParam('name', 'The collection name', 'FleekNFAs', types.string)
@@ -4,15 +4,19 @@
"description": "",
"private": "false",
"scripts": {
"test": "hardhat test && forge test --via-ir",
"test:foundry": "forge test --via-ir -vvv",
"test": "yarn test:hardhat && yarn test:foundry",
"test:foundry": "forge test -vvv --fork-url mainnet --fork-block-number 16876149",
"test:hardhat": "hardhat test",
"format": "prettier --write \"./**/*.{js,json,sol,ts}\"",
"node:hardhat": "hardhat node",
"deploy:hardhat": "hardhat deploy --network hardhat",
"deploy:mumbai": "hardhat deploy --network mumbai",
"deploy:sepolia": "hardhat deploy --network sepolia",
"deploy:goerli": "hardhat deploy --network goerli",
"compile": "hardhat compile",
"verify:mumbai": "npx hardhat run ./scripts/verify-polyscan.js --network mumbai"
"verify:mumbai": "npx hardhat run ./scripts/verify.js --network mumbai",
"verify:goerli": "npx hardhat run ./scripts/verify.js --network goerli",
"verify:sepolia": "npx hardhat run ./scripts/verify.js --network sepolia"
},
"repository": {
"type": "git",

@@ -25,6 +29,8 @@
},
"homepage": "https://github.com/fleekxyz/non-fungible-apps#readme",
"devDependencies": {
"@ensdomains/ens-contracts": "^0.0.20",
"@ensdomains/eth-ens-namehash": "^2.0.15",
"@nomicfoundation/hardhat-chai-matchers": "^1.0.5",
"@nomicfoundation/hardhat-network-helpers": "^1.0.7",
"@nomicfoundation/hardhat-toolbox": "^2.0.0",
@@ -44,7 +44,7 @@ module.exports = async (taskArgs, hre) => {
console.log(taskArgs);
console.log();

const arguments = [name, symbol, billing];
const deployArguments = [name, symbol, billing];

const libraries = await libraryDeployment(hre);

@@ -80,7 +80,7 @@ module.exports = async (taskArgs, hre) => {
console.log('Creating new proxy contract...');
deployResult = await upgrades.deployProxy(
Contract,
arguments,
deployArguments,
DEFAULT_PROXY_SETTINGS
);
await deployResult.deployed();
@@ -1,4 +1,4 @@
// npx hardhat run scripts/mint.js --network mumbai
// npx hardhat run scripts/mint.js --network mumbai/sepolia/goerli
const { getContract } = require('./util');
const { getSVGBase64, getSVGColor } = require('./utils/read-svg');
const path = require('path');
@@ -1,4 +1,4 @@
// npx hardhat run scripts/tokenURI.js --network mumbai
// npx hardhat run scripts/tokenURI.js --network mumbai/sepolia/goerli
const { getContract } = require('./util');

// TODO: make this arguments
@@ -1,4 +1,4 @@
// npx hardhat run scripts/upgrade.js --network mumbai
// npx hardhat run scripts/upgrade.js --network mumbai/sepolia/goerli
const { getContract } = require('./util');

// TODO: make this arguments
@@ -1,3 +1,4 @@
const { getImplementationAddress } = require('@openzeppelin/upgrades-core');
const { writeFile } = require('./file');
const { existsSync } = require('fs');
const path = require('path');
@@ -38,9 +39,16 @@ const deployStore = async (network, contractName, contract) => {
const { buildId, solcInput, abi, bytecode, metadata, storageLayout } =
await getBuildData(contractName);

const implementationAddress = await getImplementationAddress(
hre.network.provider,
contract.address
);

const data = {
buildId,
timestamp: new Date().toLocaleString('en-US'),
address: contract.address,
implementationAddress,
transactionHash: contract.deployTransaction.hash,
args: contract.deployTransaction.args,
gasPrice: contract.deployTransaction.gasPrice.toNumber(),
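The object written by `deployStore` is what the verify script below resolves per network under `deployments/<network>/FleekERC721.json`; a minimal sketch of consuming it (the field names come from the `data` object above, the hard-coded network name is an assumption) looks like:

```typescript
// Sketch: load a stored deployment the same way scripts/verify.js resolves it.
const networkName = 'goerli'; // inside a Hardhat script this would be hre.network.name
// eslint-disable-next-line @typescript-eslint/no-var-requires
const deployment = require(`../deployments/${networkName}/FleekERC721.json`);

console.log(deployment.address);               // proxy address
console.log(deployment.implementationAddress); // implementation behind the proxy
console.log(deployment.transactionHash);
```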
@@ -1,5 +1,7 @@
const { ethers } = require('hardhat');
const { address } = require('../deployments/mumbai/FleekERC721.json');
require('@nomiclabs/hardhat-etherscan');

const networkName = hre.network.name;
const { address } = require(`../deployments/${networkName}/FleekERC721.json`);
require('@nomiclabs/hardhat-etherscan');

async function main() {
@@ -162,6 +162,7 @@ contract Test_FleekERC721_AccessControl is Test_FleekERC721_Base, Test_FleekERC7

function test_mint() public {
// Anyone can mint
transferENS(TestConstants.APP_ENS, anyAddress);
vm.startPrank(anyAddress);
mintDefault(address(99));
vm.stopPrank();

@@ -201,7 +202,7 @@ contract Test_FleekERC721_AccessControl is Test_FleekERC721_Base, Test_FleekERC7
}

function test_setTokenENS() public {
string memory ens = "ens";
string memory ens = "ens.eth";

// CollectionOwner
vm.prank(collectionOwner);
@@ -214,10 +215,12 @@ contract Test_FleekERC721_AccessControl is Test_FleekERC721_Base, Test_FleekERC7
CuT.setTokenENS(tokenId, ens);

// TokenOwner
transferENS(ens, tokenOwner);
vm.prank(tokenOwner);
CuT.setTokenENS(tokenId, ens);

// TokenController
transferENS(ens, tokenController);
vm.prank(tokenController);
CuT.setTokenENS(tokenId, ens);

@@ -594,6 +597,61 @@ contract Test_FleekERC721_AccessControl is Test_FleekERC721_Base, Test_FleekERC7
CuT.setPausable(true);
}

function test_setTokenVerifier() public {
address otherVerifier = address(0x1234);
CuT.grantCollectionRole(FleekAccessControl.CollectionRoles.Verifier, otherVerifier);

// CollectionOwner
vm.prank(collectionOwner);
expectRevertWithMustBeTokenOwner(tokenId);
CuT.setTokenVerifier(tokenId, otherVerifier);

// CollectionVerifier
vm.prank(collectionVerifier);
expectRevertWithMustBeTokenOwner(tokenId);
CuT.setTokenVerifier(tokenId, otherVerifier);

// TokenOwner
vm.prank(tokenOwner);
CuT.setTokenVerifier(tokenId, otherVerifier);

// TokenController
vm.prank(tokenController);
expectRevertWithMustBeTokenOwner(tokenId);
CuT.setTokenVerifier(tokenId, collectionVerifier);

// AnyAddress
vm.prank(anyAddress);
expectRevertWithMustBeTokenOwner(tokenId);
CuT.setTokenVerifier(tokenId, collectionVerifier);
}

function test_setTokenVerified() public {
// CollectionOwner
vm.prank(collectionOwner);
expectRevertWithCollectionRole(FleekAccessControl.CollectionRoles.Verifier);
CuT.setTokenVerified(tokenId, true);

// CollectionVerifier
vm.prank(collectionVerifier);
CuT.setTokenVerified(tokenId, true);

// TokenOwner
vm.prank(tokenOwner);
expectRevertWithCollectionRole(FleekAccessControl.CollectionRoles.Verifier);
CuT.setTokenVerified(tokenId, false);

// TokenController
vm.prank(tokenController);
expectRevertWithCollectionRole(FleekAccessControl.CollectionRoles.Verifier);
CuT.setTokenVerified(tokenId, false);

// AnyAddress
vm.prank(anyAddress);
expectRevertWithCollectionRole(FleekAccessControl.CollectionRoles.Verifier);
CuT.setTokenVerified(tokenId, false);
}

function test_cannotHaveLessThanOneCollectionOwner() public {
CuT.revokeCollectionRole(FleekAccessControl.CollectionRoles.Owner, collectionOwner);
expectRevertWithMustHaveAtLeastOneOwner();
@@ -7,10 +7,6 @@ import {Strings} from "@openzeppelin/contracts/utils/Strings.sol";
contract APConstants is Test {
using Strings for address;

function expectRevertWithMustBeTokenVerifier(uint256 tokenId) public {
vm.expectRevert(abi.encodeWithSelector(MustBeTokenVerifier.selector, tokenId));
}

function assertAccessPointJSON(
string memory accessPointName,
string memory _tokenId,

File diff suppressed because one or more lines are too long
@@ -23,7 +23,7 @@ contract Test_FleekERC721_Deploy is Test_FleekERC721_Base {
}

function testFuzz_nameAndSymbol(string memory _name, string memory _symbol) public {
CuT = new FleekERC721();
CuT = deployUninitialized();
CuT.initialize(_name, _symbol, new uint256[](0));

assertEq(CuT.name(), _name);
@@ -0,0 +1,34 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.8.17;

import "./TestBase.sol";
import {Utils} from "./Utils.sol";

contract Test_FleekERC721_ENS is Test_FleekERC721_Base {
function expectRevertWithMustBeENSOwner() internal {
vm.expectRevert(MustBeENSOwner.selector);
}

function setUp() public {
baseSetUp();
}

function testFuzz_cannotMintIfNotENSOwner(address account) public {
vm.assume(deployer != account);
vm.assume(account != address(0));
vm.prank(account);
expectRevertWithMustBeENSOwner();
mintDefault(account);
}

function testFuzz_cannotSetTokenENSIfNotENSOwner(address account) public {
vm.assume(deployer != account);
vm.assume(account != address(0));
mintDefault(account);

vm.prank(account);
expectRevertWithMustBeENSOwner();
CuT.setTokenENS(0, TestConstants.APP_ENS);
}
}
@@ -18,7 +18,7 @@ contract Test_FleekERC721_GetToken is Test_FleekERC721_Base {
string memory name,
string memory description,
string memory externalURL,
string memory ENS,
string memory ens,
uint256 currentBuild,
string memory logo,
uint24 color

@@ -28,7 +28,7 @@ contract Test_FleekERC721_GetToken is Test_FleekERC721_Base {
assertEq(externalURL, TestConstants.APP_EXTERNAL_URL);
assertEq(logo, TestConstants.LOGO_0);
assertEq(color, TestConstants.APP_COLOR);
assertEq(ENS, TestConstants.APP_ENS);
assertEq(ens, TestConstants.APP_ENS);
assertEq(currentBuild, 0);
}

@@ -45,6 +45,7 @@ contract Test_FleekERC721_GetToken is Test_FleekERC721_Base {
CuT.setTokenName(tokenId, newAppName);
CuT.setTokenDescription(tokenId, newDescription);
CuT.setTokenExternalURL(tokenId, newExternalURL);
transferENS(newENS, deployer);
CuT.setTokenENS(tokenId, newENS);
CuT.setTokenBuild(tokenId, newCommitHash, newRepository);
CuT.setTokenLogoAndColor(tokenId, newLogo, newColor);

@@ -53,7 +54,7 @@ contract Test_FleekERC721_GetToken is Test_FleekERC721_Base {
string memory name,
string memory description,
string memory externalURL,
string memory ENS,
string memory ens,
uint256 currentBuild,
string memory logo,
uint24 color

@@ -63,7 +64,7 @@ contract Test_FleekERC721_GetToken is Test_FleekERC721_Base {
assertEq(externalURL, newExternalURL);
assertEq(logo, newLogo);
assertEq(color, newColor);
assertEq(ENS, newENS);
assertEq(ens, newENS);
assertEq(currentBuild, 1);
}

@@ -26,6 +26,8 @@ contract Test_FleekERC721_Mint is Test_FleekERC721_Base {

function test_mintTwoTokensForTwoAddresses() public {
uint256 firstMint = mintDefault(deployer);

transferENS("fleek.eth", deployer);
uint256 secondMint = CuT.mint(
address(12),
"Different App Name",

@@ -45,6 +47,7 @@ contract Test_FleekERC721_Mint is Test_FleekERC721_Base {
}

function test_mintWithAutoApprovalAPsOn() public {
transferENS("fleek.eth", deployer);
uint256 mint = CuT.mint(
address(12),
"Different App Name",

@@ -83,6 +86,7 @@ contract Test_FleekERC721_Mint is Test_FleekERC721_Base {
bool autoApprovalAp
) public {
vm.assume(to != address(0));
transferENS(ens, deployer);
uint256 tokenId = CuT.mint(
to,
appName,
@@ -5,6 +5,7 @@ pragma solidity ^0.8.17;
import "forge-std/Test.sol";
import "contracts/FleekERC721.sol";
import {TestConstants} from "./Constants.sol";
import {Utils} from "./Utils.sol";

abstract contract Test_FleekERC721_Assertions is Test {
function expectRevertWithTokenRole(uint256 tokenId, FleekAccessControl.TokenRoles role) public {

@@ -38,16 +39,37 @@ abstract contract Test_FleekERC721_Assertions is Test {
function expectRevertWithInvalidTokenId() public {
vm.expectRevert("ERC721: invalid token ID");
}

function expectRevertWithMustBeTokenVerifier(uint256 tokenId) public {
vm.expectRevert(abi.encodeWithSelector(MustBeTokenVerifier.selector, tokenId));
}
}

abstract contract Test_FleekERC721_Base is Test, Test_FleekERC721_Assertions {
FleekERC721 internal CuT; // Contract Under Test
address internal deployer;
ENS internal constant _ens = ENS(0x00000000000C2E074eC69A0dFb2997BA6C7d2e1e);

function deployUninitialized() internal returns (FleekERC721) {
FleekERC721 _contract = new FleekERC721();
vm.store(address(_contract), bytes32(0), bytes32(0)); // Overrides `_initialized` and `_initializing` states
return _contract;
}

function baseSetUp() internal {
CuT = new FleekERC721();
vm.prank(address(CuT));
CuT = deployUninitialized();
CuT.initialize("Test Contract", "FLKAPS", new uint256[](0));
deployer = address(this);
transferENS(TestConstants.APP_ENS, deployer);
}

function transferENS(string memory ens, address newOwner) public {
bytes32 node = Utils.namehash(ens);
address ensOwner = _ens.owner(node);
vm.deal(ensOwner, 100000000000);
vm.prank(ensOwner);
_ens.setOwner(node, newOwner);
}

function mintDefault(address to) internal returns (uint256) {

File diff suppressed because one or more lines are too long
@@ -0,0 +1,16 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.8.17;

import {FleekENS} from "contracts/util/FleekENS.sol";

library Utils {
/**
* @dev This function is copied from `FleekENS.sol`.
* It changes the `internal` modifier to `public`, allowing it
* to be used in tests with memory values.
*/
function namehash(string calldata name) public view returns (bytes32) {
return FleekENS.namehash(bytes(name), 0);
}
}
@@ -0,0 +1,49 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.8.17;

import "./TestBase.sol";
import {TestConstants} from "./Constants.sol";
import {FleekAccessControl} from "../../../contracts/FleekAccessControl.sol";

contract Test_FleekERC721_Verified is Test_FleekERC721_Base {
uint256 internal tokenId;

function setUp() public {
baseSetUp();
tokenId = mintDefault(deployer);
}

function test_shouldNotBeVerifiedAfterMint() public {
assertFalse(CuT.isTokenVerified(tokenId));
}

function test_shouldVerifyToken() public {
CuT.setTokenVerified(tokenId, true);
assertTrue(CuT.isTokenVerified(tokenId));
}

function test_verifyAndUnverify() public {
CuT.setTokenVerified(tokenId, true);
assertTrue(CuT.isTokenVerified(tokenId));
CuT.setTokenVerified(tokenId, false);
assertFalse(CuT.isTokenVerified(tokenId));
}

function testFuzz_shouldNotAllowVerifyIfHasNotVerifierRole(address verifier) public {
vm.assume(!CuT.hasCollectionRole(FleekAccessControl.CollectionRoles.Verifier, verifier));

vm.prank(verifier);
expectRevertWithCollectionRole(FleekAccessControl.CollectionRoles.Verifier);
CuT.setTokenVerified(tokenId, true);
}

function testFuzz_shouldNotAllowVerifyIfIsNotTokenVerifier(address verifier) public {
vm.assume(CuT.getTokenVerifier(tokenId) != verifier);
CuT.grantCollectionRole(FleekAccessControl.CollectionRoles.Verifier, verifier);

vm.prank(verifier);
expectRevertWithMustBeTokenVerifier(tokenId);
CuT.setTokenVerified(tokenId, true);
}
}
@@ -1,7 +1,5 @@
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { expect } from 'chai';
import { ethers } from 'hardhat';
import { TestConstants, Fixtures, Errors } from '../helpers';
const { AccessPointStatus } = TestConstants;

@@ -19,7 +17,7 @@ describe('FleekERC721.AccessPoints.AutoApprovalOff', () => {
.to.emit(contract, 'NewAccessPoint')
.withArgs('accesspoint.com', tokenId, owner.address);

let ap = await contract.getAccessPointJSON('accesspoint.com');
const ap = await contract.getAccessPointJSON('accesspoint.com');
const parsedAp = JSON.parse(ap);

expect(parsedAp).to.eql({
@@ -42,7 +42,7 @@ describe('FleekERC721.AccessPoints.AutoApprovalOn', () => {
});

it('should revert if AP does not exist', async () => {
const { contract, tokenId } = fixture;
const { contract } = fixture;

await expect(
contract.getAccessPointJSON('random.com')

@@ -88,7 +88,7 @@ describe('FleekERC721.AccessPoints.AutoApprovalOn', () => {
});

it('should allow anyone to change AP score', async () => {
const { contract, otherAccount, tokenId } = fixture;
const { contract, otherAccount } = fixture;

await contract.increaseAccessPointScore(DefaultAP);
await contract.connect(otherAccount).increaseAccessPointScore(DefaultAP);
@@ -2,6 +2,7 @@ import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
import { ethers } from 'hardhat';
import { expect } from 'chai';
import { Fixtures, TestConstants, Errors } from './helpers';
import { BigNumber } from 'ethers';

const { Billing, MintParams } = TestConstants;

@@ -10,7 +11,7 @@ describe('FleekERC721.Billing', () => {
const mintPrice = ethers.utils.parseEther('1');
const addAPPrice = ethers.utils.parseEther('1');

const mint = (value?: any) => {
const mint = (value?: BigNumber) => {
const { contract, owner } = fixture;
return contract.mint(
owner.address,

@@ -28,7 +29,7 @@ describe('FleekERC721.Billing', () => {
);
};

const addAP = (value?: any) => {
const addAP = (value?: BigNumber) => {
const { contract } = fixture;
return contract.addAccessPoint(0, 'random.com', { value });
};
@@ -56,7 +56,7 @@ describe('FleekERC721.CollectionRoles', () => {
});

it('should fetch the list of owners', async () => {
const { owner, contract, otherAccount } = fixture;
const { contract, otherAccount } = fixture;

await contract.grantCollectionRole(
CollectionRoles.Owner,
@@ -0,0 +1,56 @@
import { expect } from 'chai';
import { TestConstants, Fixtures, Errors, transferENSNode } from './helpers';
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
const { MintParams } = TestConstants;

describe('FleekERC721.ENS', () => {
let fixture: Awaited<ReturnType<typeof Fixtures.default>>;

beforeEach(async () => {
fixture = await loadFixture(Fixtures.default);
});

it('should not allow mint if not ENS owner', async () => {
const { contract, owner } = fixture;

await expect(
contract.mint(
owner.address,
MintParams.name,
MintParams.description,
MintParams.externalUrl,
'app.eth',
MintParams.commitHash,
MintParams.gitRepository,
MintParams.logo,
MintParams.color,
MintParams.accessPointAutoApprovalSettings,
owner.address
)
).to.be.revertedWithCustomError(contract, Errors.MustBeENSOwner);
});

it('should not allow set ENS if not ENS owner', async () => {
const { contract, owner } = fixture;

await transferENSNode('app.eth', owner);

await contract.mint(
owner.address,
MintParams.name,
MintParams.description,
MintParams.externalUrl,
'app.eth',
MintParams.commitHash,
MintParams.gitRepository,
MintParams.logo,
MintParams.color,
MintParams.accessPointAutoApprovalSettings,
owner.address
);

await expect(
contract.setTokenENS(0, 'subdomain.app.eth')
).to.be.revertedWithCustomError(contract, Errors.MustBeENSOwner);
});
});
@@ -1,6 +1,6 @@
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
import { expect } from 'chai';
import { TestConstants, Fixtures, parseTokenURI } from './helpers';
import { TestConstants, Fixtures } from './helpers';
import { ethers } from 'hardhat';
const { MintParams } = TestConstants;

@@ -3,6 +3,7 @@ export const Errors = Object.freeze({
AccessPointAlreadyExists: 'AccessPointAlreadyExists',
AccessPointScoreCannotBeLower: 'AccessPointScoreCannotBeLower',
MustBeAccessPointOwner: 'MustBeAccessPointOwner',
MustBeTokenVerifier: 'MustBeTokenVerifier',
MustHaveCollectionRole: 'MustHaveCollectionRole',
MustHaveTokenRole: 'MustHaveTokenRole',
MustHaveAtLeastOneOwner: 'MustHaveAtLeastOneOwner',

@@ -14,4 +15,5 @@ export const Errors = Object.freeze({
PausableIsSetTo: 'PausableIsSetTo',
ThereIsNoTokenMinted: 'ThereIsNoTokenMinted',
RequiredPayment: 'RequiredPayment',
MustBeENSOwner: 'MustBeENSOwner',
});
@@ -1,11 +1,14 @@
import { ethers, upgrades } from 'hardhat';
import { TestConstants } from './constants';
import { transferENSNode } from './utils';

export abstract class Fixtures {
static async default() {
// Contracts are deployed using the first signer/account by default
const [owner, otherAccount] = await ethers.getSigners();

await transferENSNode(TestConstants.MintParams.ens, owner);

const libraries = {
FleekSVG: (await (await ethers.getContractFactory('FleekSVG')).deploy())
.address,
@@ -1,3 +1,9 @@
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { ethers } from 'hardhat';

// eslint-disable-next-line @typescript-eslint/no-var-requires
const namehash = require('@ensdomains/eth-ens-namehash');

export const parseTokenURI = (tokenURI: string) => {
const tokenURIDecoded = Buffer.from(
tokenURI.replace('data:application/json;base64,', ''),
@ -6,3 +12,25 @@ export const parseTokenURI = (tokenURI: string) => {
|
|||
|
||||
return JSON.parse(tokenURIDecoded);
|
||||
};
|
||||
|
||||
export const getENSNode = (name: string) => {
|
||||
return namehash.hash(namehash.normalize(name));
|
||||
};
|
||||
|
||||
export const transferENSNode = async (name: string, to: SignerWithAddress) => {
|
||||
const ens = await ethers.getContractAt(
|
||||
'ENS',
|
||||
'0x00000000000C2E074eC69A0dFb2997BA6C7d2e1e'
|
||||
);
|
||||
|
||||
const ensNode = getENSNode(name);
|
||||
|
||||
const ensOwner = await ethers.getImpersonatedSigner(await ens.owner(ensNode));
|
||||
|
||||
await to.sendTransaction({
|
||||
to: ensOwner.address,
|
||||
value: ethers.utils.parseEther('1000'),
|
||||
});
|
||||
|
||||
await ens.connect(ensOwner).setOwner(ensNode, to.address);
|
||||
};
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
|
|||
import { expect } from 'chai';
|
||||
import { TestConstants, Fixtures, Errors } from './helpers';
|
||||
|
||||
const { MintParams, CollectionRoles, TokenRoles } = TestConstants;
|
||||
const { MintParams, CollectionRoles } = TestConstants;
|
||||
|
||||
describe('FleekERC721.Pausable', () => {
|
||||
let fixture: Awaited<ReturnType<typeof Fixtures.default>>;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
|
||||
import { expect } from 'chai';
|
||||
import { TestConstants, Fixtures, parseTokenURI, Errors } from './helpers';
|
||||
import { TestConstants, Fixtures, Errors } from './helpers';
|
||||
|
||||
const { TokenRoles } = TestConstants;
|
||||
|
||||
|
|
@ -25,7 +25,7 @@ describe('FleekERC721.TokenRoles', () => {
|
|||
});
|
||||
|
||||
it('should add a new controller', async () => {
|
||||
const { contract, owner, otherAccount, tokenId } = fixture;
|
||||
const { contract, otherAccount, tokenId } = fixture;
|
||||
await contract.grantTokenRole(
|
||||
tokenId,
|
||||
TokenRoles.Controller,
|
||||
|
|
@ -78,7 +78,7 @@ describe('FleekERC721.TokenRoles', () => {
|
|||
});
|
||||
|
||||
it('should remove an added controller', async () => {
|
||||
const { contract, owner, otherAccount, tokenId } = fixture;
|
||||
const { contract, otherAccount, tokenId } = fixture;
|
||||
await contract.grantTokenRole(
|
||||
tokenId,
|
||||
TokenRoles.Controller,
|
||||
|
|
@ -134,7 +134,7 @@ describe('FleekERC721.TokenRoles', () => {
|
|||
});
|
||||
|
||||
it('should not be able to remove address role', async () => {
|
||||
const { contract, owner, otherAccount, tokenId } = fixture;
|
||||
const { contract, otherAccount, tokenId } = fixture;
|
||||
|
||||
await expect(
|
||||
contract
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ describe('FleekERC721.TokenURI', () => {
|
|||
image: TestConstants.ResultantImage.Default,
|
||||
external_url: TestConstants.MintParams.externalUrl,
|
||||
access_point_auto_approval: false,
|
||||
verified: false,
|
||||
attributes: [
|
||||
{
|
||||
trait_type: 'ENS',
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
|
||||
import { expect } from 'chai';
|
||||
import { TestConstants, Fixtures, Events } from './helpers';
|
||||
import { TestConstants, Fixtures, Events, transferENSNode } from './helpers';
|
||||
|
||||
const {
|
||||
Logos: { 1: Logo1 },
|
||||
|
|
@ -25,9 +25,11 @@ describe('FleekERC721.UpdateProperties', () => {
|
|||
it('should emit event for ens change', async () => {
|
||||
const { contract, tokenId, owner } = fixture;
|
||||
|
||||
await expect(contract.setTokenENS(tokenId, 'app.eth'))
|
||||
await transferENSNode('subdomain.app.eth', owner);
|
||||
|
||||
await expect(contract.setTokenENS(tokenId, 'subdomain.app.eth'))
|
||||
.to.emit(contract, Events.MetadataUpdate.string)
|
||||
.withArgs(tokenId, 'ENS', 'app.eth', owner.address);
|
||||
.withArgs(tokenId, 'ENS', 'subdomain.app.eth', owner.address);
|
||||
});
|
||||
|
||||
it('should emit event for name change', async () => {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,55 @@
|
|||
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
|
||||
import { expect } from 'chai';
|
||||
import { Errors, Fixtures, TestConstants } from './helpers';
|
||||
|
||||
describe('FleekERC721.GetToken', () => {
|
||||
let fixture: Awaited<ReturnType<typeof Fixtures.withMint>>;
|
||||
|
||||
beforeEach(async () => {
|
||||
fixture = await loadFixture(Fixtures.withMint);
|
||||
});
|
||||
|
||||
it('should mint token in not verified state', async () => {
|
||||
const { contract, tokenId } = fixture;
|
||||
|
||||
expect(await contract.isTokenVerified(tokenId)).to.be.false;
|
||||
});
|
||||
|
||||
it('should set token to verified state', async () => {
|
||||
const { contract, tokenId } = fixture;
|
||||
|
||||
await contract.setTokenVerified(tokenId, true);
|
||||
|
||||
expect(await contract.isTokenVerified(tokenId)).to.be.true;
|
||||
});
|
||||
|
||||
it('should set token to verified and unverified states', async () => {
|
||||
const { contract, tokenId } = fixture;
|
||||
|
||||
await contract.setTokenVerified(tokenId, true);
|
||||
await contract.setTokenVerified(tokenId, false);
|
||||
|
||||
expect(await contract.isTokenVerified(tokenId)).to.be.false;
|
||||
});
|
||||
|
||||
it('should revert for non verifier call', async () => {
|
||||
const { contract, tokenId, otherAccount } = fixture;
|
||||
|
||||
await expect(contract.connect(otherAccount).setTokenVerified(tokenId, true))
|
||||
.to.be.revertedWithCustomError(contract, Errors.MustHaveCollectionRole)
|
||||
.withArgs(TestConstants.CollectionRoles.Verifier);
|
||||
});
|
||||
|
||||
it('should revert for non token verifier', async () => {
|
||||
const { contract, tokenId, otherAccount } = fixture;
|
||||
|
||||
await contract.grantCollectionRole(
|
||||
TestConstants.CollectionRoles.Verifier,
|
||||
otherAccount.address
|
||||
);
|
||||
|
||||
await expect(contract.connect(otherAccount).setTokenVerified(tokenId, true))
|
||||
.to.be.revertedWithCustomError(contract, Errors.MustBeTokenVerifier)
|
||||
.withArgs(tokenId);
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,109 @@
|
|||
import { expect } from 'chai';
|
||||
import * as hre from 'hardhat';
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore
|
||||
import deploy from '../../../scripts/deploy';
|
||||
import { getImplementationAddress } from '@openzeppelin/upgrades-core';
|
||||
import { Contract } from 'ethers';
|
||||
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
|
||||
import { Errors, TestConstants } from '../contracts/FleekERC721/helpers';
|
||||
|
||||
const taskArgs = {
|
||||
newProxyInstance: false,
|
||||
name: 'FleekNFAs',
|
||||
symbol: 'FLKNFA',
|
||||
billing: [],
|
||||
};
|
||||
|
||||
const getImplementationContract = async (
|
||||
proxyAddress: string
|
||||
): Promise<Contract> => {
|
||||
const implementationAddress = await getImplementationAddress(
|
||||
hre.network.provider,
|
||||
proxyAddress
|
||||
);
|
||||
return hre.ethers.getContractAt('FleekERC721', implementationAddress);
|
||||
};
|
||||
|
||||
const deployFixture = async () => {
|
||||
const [owner] = await hre.ethers.getSigners();
|
||||
|
||||
const proxy = (await deploy(taskArgs, hre)) as Contract;
|
||||
|
||||
const implementation = await getImplementationContract(proxy.address);
|
||||
|
||||
return { proxy, implementation, owner };
|
||||
};
|
||||
|
||||
describe('Deploy', () => {
|
||||
let fixture: Awaited<ReturnType<typeof deployFixture>>;
|
||||
|
||||
// Suppress console.log
|
||||
const logger = console.log;
|
||||
before(() => {
|
||||
console.log = () => undefined;
|
||||
});
|
||||
after(() => {
|
||||
console.log = logger;
|
||||
});
|
||||
// --------------------
|
||||
|
||||
beforeEach(async () => {
|
||||
fixture = await loadFixture(deployFixture);
|
||||
});
|
||||
|
||||
it('should deploy the contract', async () => {
|
||||
const { proxy, implementation } = fixture;
|
||||
|
||||
expect(proxy.address).to.be.a('string');
|
||||
expect(implementation.address).to.be.a('string');
|
||||
expect(proxy.address).to.be.not.equal(implementation.address);
|
||||
});
|
||||
|
||||
it('should have proxy unpaused and implementation paused', async () => {
|
||||
const { proxy, implementation } = fixture;
|
||||
|
||||
expect(await proxy.isPaused()).to.be.false;
|
||||
expect(await implementation.isPaused()).to.be.true;
|
||||
});
|
||||
|
||||
it('should not allow initialize implementation contract', async () => {
|
||||
const { implementation } = fixture;
|
||||
|
||||
await expect(
|
||||
implementation.initialize(
|
||||
taskArgs.name,
|
||||
taskArgs.symbol,
|
||||
taskArgs.billing
|
||||
)
|
||||
).to.be.revertedWith('Initializable: contract is already initialized');
|
||||
});
|
||||
|
||||
it('should have owner on proxy but not on implementation', async () => {
|
||||
const { proxy, implementation, owner } = fixture;
|
||||
|
||||
expect(await proxy.hasCollectionRole(0, owner.address)).to.be.true;
|
||||
expect(await implementation.hasCollectionRole(0, owner.address)).to.be
|
||||
.false;
|
||||
});
|
||||
|
||||
it('should not allow mint on implementation contract', async () => {
|
||||
const { implementation, owner } = fixture;
|
||||
|
||||
await expect(
|
||||
implementation.mint(
|
||||
owner.address,
|
||||
TestConstants.MintParams.name,
|
||||
TestConstants.MintParams.description,
|
||||
TestConstants.MintParams.externalUrl,
|
||||
TestConstants.MintParams.ens,
|
||||
TestConstants.MintParams.commitHash,
|
||||
TestConstants.MintParams.gitRepository,
|
||||
TestConstants.MintParams.logo,
|
||||
TestConstants.MintParams.color,
|
||||
TestConstants.MintParams.accessPointAutoApprovalSettings,
|
||||
owner.address
|
||||
)
|
||||
).to.be.revertedWithCustomError(implementation, Errors.ContractIsPaused);
|
||||
});
|
||||
});
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
import { expect } from 'chai';
|
||||
import {
|
||||
proxyStore,
|
||||
|
|
|
|||
|
|
@ -1,4 +1,8 @@
|
|||
{
|
||||
"extends": "../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"allowJs": true,
|
||||
"noEmit": true
|
||||
},
|
||||
"include": ["./**/*"]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,6 +14,33 @@
|
|||
dependencies:
|
||||
"@jridgewell/trace-mapping" "0.3.9"
|
||||
|
||||
"@ensdomains/buffer@^0.1.1":
|
||||
version "0.1.1"
|
||||
resolved "https://registry.yarnpkg.com/@ensdomains/buffer/-/buffer-0.1.1.tgz#6c275ba7e457e935405b67876f1f0d980c8baa63"
|
||||
integrity sha512-92SfSiNS8XorgU7OUBHo/i1ZU7JV7iz/6bKuLPNVsMxV79/eI7fJR6jfJJc40zAHjs3ha+Xo965Idomlq3rqnw==
|
||||
|
||||
"@ensdomains/ens-contracts@^0.0.20":
|
||||
version "0.0.20"
|
||||
resolved "https://registry.yarnpkg.com/@ensdomains/ens-contracts/-/ens-contracts-0.0.20.tgz#346eac70d666a7864142287ce1759b0f44bd8a5e"
|
||||
integrity sha512-lAHQBVj2WtgbchcrE8ZuFI6DFq+O33wkLAGqsO2gcnn0EUJb65OJIdTqUfvfULKGJjkB2pyHfS/RgMSIW6h1Pw==
|
||||
dependencies:
|
||||
"@ensdomains/buffer" "^0.1.1"
|
||||
"@ensdomains/solsha1" "0.0.3"
|
||||
"@openzeppelin/contracts" "^4.1.0"
|
||||
dns-packet "^5.3.0"
|
||||
|
||||
"@ensdomains/eth-ens-namehash@^2.0.15":
|
||||
version "2.0.15"
|
||||
resolved "https://registry.yarnpkg.com/@ensdomains/eth-ens-namehash/-/eth-ens-namehash-2.0.15.tgz#5e5f2f24ba802aff8bc19edd822c9a11200cdf4a"
|
||||
integrity sha512-JRDFP6+Hczb1E0/HhIg0PONgBYasfGfDheujmfxaZaAv/NAH4jE6Kf48WbqfRZdxt4IZI3jl3Ri7sZ1nP09lgw==
|
||||
|
||||
"@ensdomains/solsha1@0.0.3":
|
||||
version "0.0.3"
|
||||
resolved "https://registry.yarnpkg.com/@ensdomains/solsha1/-/solsha1-0.0.3.tgz#fd479da9d40aadb59ff4fb4ec50632e7d2275a83"
|
||||
integrity sha512-uhuG5LzRt/UJC0Ux83cE2rCKwSleRePoYdQVcqPN1wyf3/ekMzT/KZUF9+v7/AG5w9jlMLCQkUM50vfjr0Yu9Q==
|
||||
dependencies:
|
||||
hash-test-vectors "^1.3.2"
|
||||
|
||||
"@ethereumjs/common@2.5.0":
|
||||
version "2.5.0"
|
||||
resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-2.5.0.tgz#ec61551b31bef7a69d1dc634d8932468866a4268"
|
||||
|
|
@ -398,6 +425,11 @@
|
|||
"@jridgewell/resolve-uri" "^3.0.3"
|
||||
"@jridgewell/sourcemap-codec" "^1.4.10"
|
||||
|
||||
"@leichtgewicht/ip-codec@^2.0.1":
|
||||
version "2.0.4"
|
||||
resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b"
|
||||
integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==
|
||||
|
||||
"@metamask/eth-sig-util@^4.0.0":
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz#3ad61f6ea9ad73ba5b19db780d40d9aae5157088"
|
||||
|
|
@ -700,6 +732,11 @@
|
|||
resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.8.1.tgz#363f7dd08f25f8f77e16d374350c3d6b43340a7a"
|
||||
integrity sha512-1wTv+20lNiC0R07jyIAbHU7TNHKRwGiTGRfiNnA8jOWjKT98g5OgLpYWOi40Vgpk8SPLA9EvfJAbAeIyVn+7Bw==
|
||||
|
||||
"@openzeppelin/contracts@^4.1.0":
|
||||
version "4.8.2"
|
||||
resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.8.2.tgz#d815ade0027b50beb9bcca67143c6bcc3e3923d6"
|
||||
integrity sha512-kEUOgPQszC0fSYWpbh2kT94ltOJwj1qfT2DWo+zVttmGmf97JZ99LspePNaeeaLhCImaHVeBbjaQFZQn7+Zc5g==
|
||||
|
||||
"@openzeppelin/contracts@^4.7.3":
|
||||
version "4.8.1"
|
||||
resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.8.1.tgz#709cfc4bbb3ca9f4460d60101f15dac6b7a2d5e4"
|
||||
|
|
@ -2155,6 +2192,13 @@ dir-glob@^3.0.1:
|
|||
dependencies:
|
||||
path-type "^4.0.0"
|
||||
|
||||
dns-packet@^5.3.0:
|
||||
version "5.4.0"
|
||||
resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b"
|
||||
integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g==
|
||||
dependencies:
|
||||
"@leichtgewicht/ip-codec" "^2.0.1"
|
||||
|
||||
dom-walk@^0.1.0:
|
||||
version "0.1.2"
|
||||
resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.2.tgz#0c548bef048f4d1f2a97249002236060daa3fd84"
|
||||
|
|
@ -3302,6 +3346,11 @@ hash-base@^3.0.0:
|
|||
readable-stream "^3.6.0"
|
||||
safe-buffer "^5.2.0"
|
||||
|
||||
hash-test-vectors@^1.3.2:
|
||||
version "1.3.2"
|
||||
resolved "https://registry.yarnpkg.com/hash-test-vectors/-/hash-test-vectors-1.3.2.tgz#f050fde1aff46ec28dcf4f70e4e3238cd5000f4c"
|
||||
integrity sha512-PKd/fitmsrlWGh3OpKbgNLE04ZQZsvs1ZkuLoQpeIKuwx+6CYVNdW6LaPIS1QAdZvV40+skk0w4YomKnViUnvQ==
|
||||
|
||||
hash.js@1.1.3:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.3.tgz#340dedbe6290187151c1ea1d777a3448935df846"
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@
|
|||
"@typescript-eslint/parser": "^5.54.1",
|
||||
"eslint": "^8.35.0",
|
||||
"eslint-config-prettier": "^8.7.0",
|
||||
"eslint-plugin-filenames-simple": "^0.8.0",
|
||||
"eslint-plugin-prettier": "^4.2.1",
|
||||
"eslint-plugin-simple-import-sort": "^10.0.0",
|
||||
"husky": "^8.0.2",
|
||||
|
|
|
|||
|
|
@ -0,0 +1,12 @@
|
|||
# package directories
|
||||
node_modules
|
||||
jspm_packages
|
||||
|
||||
# Serverless directories
|
||||
.serverless
|
||||
|
||||
# esbuild directories
|
||||
.esbuild
|
||||
|
||||
# output
|
||||
dist
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
## NFA - Serverless
|
||||
|
||||
### Requirements
|
||||
|
||||
This sub-project of NFAs requires Node 18. So far it has been tested with 18.13.0.
|
||||
|
||||
### Setup
|
||||
|
||||
After cloning the repo, run `yarn` in the root directory. Then `cd` into the `serverless` directory and run `yarn` there as well.
|
||||
|
||||
If you are deploying, make sure your AWS credentials are set as environment variables or have been configured with the AWS CLI. Please refer to the Serverless Framework documentation [here](https://www.serverless.com/framework/docs/providers/aws/guide/credentials/) to see all the ways to set these credentials.
|
||||
|
||||
At a minimum, these values need to be set:
|
||||
```
|
||||
export AWS_ACCESS_KEY_ID=value
|
||||
export AWS_SECRET_ACCESS_KEY=value
|
||||
export AWS_SESSION_TOKEN=value
|
||||
```
|
||||
|
||||
You can get these from the main screen after logging in.
|
||||
|
||||
### Running and Testing
|
||||
|
||||
First, build the code by running `yarn build`. This produces the compiled output in the `dist` directory.
|
||||
|
||||
TODO: `yarn test`
|
||||
|
||||
To run locally, use `SLS_DEBUG=* yarn sls offline --verbose`. You can then hit the endpoints printed in the console using curl, Postman, or any other HTTP client, for example with the small script sketched below.
|
||||
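As a quick smoke test, a minimal script along these lines can exercise the endpoint. This is only a sketch: the URL below is hypothetical — copy the actual POST `/build` URL that serverless-offline prints on startup (by default it listens on `http://localhost:3000` and may prefix the stage):

```ts
// smoke-test.ts — hit the local submitBuildInfo endpoint and print the response.
// ENDPOINT is a placeholder; use the URL shown in the serverless-offline console output.
const ENDPOINT = 'http://localhost:3000/prd/build';

const main = async (): Promise<void> => {
  const response = await fetch(ENDPOINT, { method: 'POST' });
  // The handler responds with JSON shaped like { buildInfo: { buildId, createdAt } }.
  console.log(response.status, await response.json());
};

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```

It can be run, for example, with the existing `ts-node` dev dependency: `yarn ts-node smoke-test.ts`.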
|
||||
### Deploying
|
||||
|
||||
To deploy, make sure you have AWS credentials set in your local environment.
|
||||
|
||||
To deploy to the development environment:
|
||||
`yarn sls deploy --stage dev`
|
||||
|
||||
To deploy to the production environment:
|
||||
`yarn sls deploy --stage prd`
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
{
|
||||
"name": "@fleekxyz/nfa-serverless",
|
||||
"version": "0.0.1",
|
||||
"description": "The serverless stack for the NFA application",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"build": "yarn tsc",
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "fleek",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@serverless/typescript": "^3.27.0",
|
||||
"@types/aws-lambda": "^8.10.114",
|
||||
"@types/node": "^18.15.5",
|
||||
"@types/uuid": "^9.0.1",
|
||||
"esbuild": "^0.17.12",
|
||||
"json-schema-to-ts": "^2.7.2",
|
||||
"serverless": "^3.28.1",
|
||||
"serverless-esbuild": "^1.42.0",
|
||||
"serverless-offline": "^12.0.4",
|
||||
"ts-node": "^10.9.1",
|
||||
"tsconfig-paths": "^4.1.2",
|
||||
"typescript": "^5.0.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@middy/core": "^4.2.7",
|
||||
"@middy/http-json-body-parser": "^4.2.7",
|
||||
"@middy/http-response-serializer": "^4.2.8",
|
||||
"aws-sdk": "^2.1342.0",
|
||||
"uuid": "^9.0.0"
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
service: nfa-serverless
|
||||
frameworkVersion: '3'
|
||||
|
||||
plugins:
|
||||
- serverless-esbuild
|
||||
- serverless-offline
|
||||
|
||||
provider:
|
||||
name: aws
|
||||
runtime: nodejs18.x
|
||||
stage: ${opt:stage, 'prd'}
|
||||
region: ${opt:region, 'us-west-2'}
|
||||
apiGateway:
|
||||
minimumCompressionSize: 1024
|
||||
shouldStartNameWithService: true
|
||||
environment:
|
||||
DEBUG: '*'
|
||||
AWS_STAGE: ${self:provider.stage}
|
||||
AWS_NODEJS_CONNECTION_REUSE_ENABLED: 1
|
||||
|
||||
custom:
|
||||
esbuild:
|
||||
bundle: true
|
||||
minify: true
|
||||
sourcemap: false
|
||||
exclude:
|
||||
- 'aws-sdk'
|
||||
target: 'node18'
|
||||
platform: 'node'
|
||||
concurrency: 10
|
||||
|
||||
functions:
|
||||
submitBuildInfo:
|
||||
handler: dist/functions/builds/handler.submitBuildInfo
|
||||
events:
|
||||
- http:
|
||||
path: build
|
||||
method: post
|
||||
cors: true
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import { APIGatewayProxyResult } from 'aws-lambda';
|
||||
import { formatJSONResponse } from '@libs/api-gateway';
|
||||
|
||||
import { v4 } from 'uuid';
|
||||
|
||||
export const submitBuildInfo = async (): Promise<APIGatewayProxyResult> => {
|
||||
try {
|
||||
const id = v4();
|
||||
const buildInfo = {
|
||||
buildId: id,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
return formatJSONResponse({
|
||||
buildInfo,
|
||||
});
|
||||
} catch (e) {
|
||||
return formatJSONResponse({
|
||||
status: 500,
|
||||
message: e,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
import { handlerPath } from '@libs/handler-resolver';
|
||||
|
||||
export const submitBuildInfo = {
|
||||
handler: `${handlerPath(__dirname)}/handler.submitBuildInfo`,
|
||||
events: [
|
||||
{
|
||||
http: {
|
||||
method: 'post',
|
||||
path: 'buildInfo',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
// QUESTION: should we add back in schema verification?
|
||||
|
||||
export const formatJSONResponse = (response: Record<string, unknown>) => {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(response),
|
||||
};
|
||||
};
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
export const handlerPath = (context: string) => {
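  // Strip the project root (process.cwd()) from the absolute directory path and
  // normalize Windows backslashes, yielding a POSIX-style handler path relative to the root.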
|
||||
return `${context.split(process.cwd())[1].substring(1).replace(/\\/g, '/')}`;
|
||||
};
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
import middy from '@middy/core';
|
||||
import middyJsonBodyParser from '@middy/http-json-body-parser';
|
||||
import httpResponseSerializer from '@middy/http-response-serializer';
|
||||
import { Handler } from 'aws-lambda';
|
||||
|
||||
export const middyfy = (handler: Handler) => {
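  // Wrap the raw Lambda handler with JSON request-body parsing and a JSON response serializer.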
|
||||
return middy(handler)
|
||||
.use(middyJsonBodyParser())
|
||||
.use(
|
||||
httpResponseSerializer({
|
||||
defaultContentType: 'application/json',
|
||||
serializers: [
|
||||
{
|
||||
regex: /^application\/json$/,
|
||||
serializer: (res) => JSON.stringify(res.body || res),
|
||||
},
|
||||
],
|
||||
})
|
||||
);
|
||||
};
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
{
|
||||
"extends": "../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"moduleResolution": "node",
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"removeComments": true,
|
||||
"outDir": "dist",
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@functions/*": ["src/functions/*"],
|
||||
"@libs/*": ["src/libs/*"]
|
||||
}
|
||||
},
|
||||
"include": ["./**/*", "src/**/*.ts", "serverless.ts"],
|
||||
"exclude": [
|
||||
"node_modules/**/*",
|
||||
".serverless/**/*",
|
||||
".webpack/**/*",
|
||||
"_warmup/**/*",
|
||||
".vscode/**/*"
|
||||
],
|
||||
"ts-node": {
|
||||
"require": ["tsconfig-paths/register"]
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
|
|
@ -126,7 +126,3 @@ Finally, you can generate the build that is going to be deployed to the Hosted S
|
|||
The command that should be used for re-deployment purposes is no different than the one that is used to deploy subgraphs in the first place (remember to replace the access token and the github_username/subgraph_name parts):
|
||||
|
||||
`graph deploy --product hosted-service --deploy-key YOUR_ACCESS_TOKEN --version-label v0.0.1 YOUR_GITHUB_USERNAME/SUBGRAPH_NAME_ON_HOSTED_SERVICE`
|
||||
|
||||
## Testing
|
||||
|
||||
You can run the unit tests found in `./tests/` with `yarn test` or `npm run test`.
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
testsFolder: tests/matchstick/
|
||||
manifestPath: subgraph.yaml
|
||||
|
|
@ -7,14 +7,10 @@
|
|||
"create-local": "graph create --node http://localhost:8020/ FleekNFA",
|
||||
"remove-local": "graph remove --node http://localhost:8020/ FleekNFA",
|
||||
"deploy-local": "graph deploy --node http://localhost:8020/ --ipfs http://localhost:5001 FleekNFA",
|
||||
"test": "graph test",
|
||||
"compile": "cd ../contracts && yarn && yarn compile && cd ../subgraph && yarn codegen"
|
||||
},
|
||||
"dependencies": {
|
||||
"@graphprotocol/graph-cli": "0.37.2",
|
||||
"@graphprotocol/graph-ts": "0.29.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"matchstick-as": "0.5.0"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,110 +1,100 @@
|
|||
import {
|
||||
Address,
|
||||
Bytes,
|
||||
log,
|
||||
store,
|
||||
ethereum,
|
||||
BigInt,
|
||||
} from '@graphprotocol/graph-ts';
|
||||
import { Bytes, log } from '@graphprotocol/graph-ts';
|
||||
|
||||
// Event Imports [based on the yaml config]
|
||||
import {
|
||||
TokenRoleChanged as TokenRoleChangedEvent,
|
||||
CollectionRoleChanged as CollectionRoleChangedEvent,
|
||||
TokenRoleChanged as TokenRoleChangedEvent,
|
||||
CollectionRoleChanged as CollectionRoleChangedEvent,
|
||||
} from '../generated/FleekNFA/FleekNFA';
|
||||
|
||||
// Entity Imports [based on the schema]
|
||||
import {
|
||||
Owner,
|
||||
Token,
|
||||
} from '../generated/schema';
|
||||
import { Owner, Token } from '../generated/schema';
|
||||
|
||||
enum CollectionRoles {
|
||||
Owner,
|
||||
Owner,
|
||||
}
|
||||
|
||||
enum TokenRoles {
|
||||
Controller,
|
||||
Controller,
|
||||
}
|
||||
|
||||
export function handleCollectionRoleChanged(
|
||||
event: CollectionRoleChangedEvent
|
||||
event: CollectionRoleChangedEvent
|
||||
): void {
|
||||
let toAddress = event.params.toAddress;
|
||||
let byAddress = event.params.byAddress;
|
||||
let role = event.params.role;
|
||||
let status = event.params.status;
|
||||
const toAddress = event.params.toAddress;
|
||||
const byAddress = event.params.byAddress;
|
||||
const role = event.params.role;
|
||||
const status = event.params.status;
|
||||
|
||||
if (role === CollectionRoles.Owner) {
|
||||
// Owner role
|
||||
if (status) {
|
||||
// granted
|
||||
let owner = Owner.load(toAddress);
|
||||
if (!owner) {
|
||||
owner = new Owner(toAddress);
|
||||
}
|
||||
owner.collection = true;
|
||||
owner.save();
|
||||
} else {
|
||||
// revoked
|
||||
let owner = Owner.load(toAddress);
|
||||
if (!owner) {
|
||||
log.error(
|
||||
'Owner entity not found. Role: {}, byAddress: {}, toAddress: {}',
|
||||
[role.toString(), byAddress.toHexString(), toAddress.toHexString()]
|
||||
);
|
||||
return;
|
||||
}
|
||||
owner.collection = false;
|
||||
owner.save();
|
||||
}
|
||||
if (role === CollectionRoles.Owner) {
|
||||
// Owner role
|
||||
if (status) {
|
||||
// granted
|
||||
let owner = Owner.load(toAddress);
|
||||
if (!owner) {
|
||||
owner = new Owner(toAddress);
|
||||
}
|
||||
owner.collection = true;
|
||||
owner.save();
|
||||
} else {
|
||||
log.error('Role not supported. Role: {}, byAddress: {}, toAddress: {}', [
|
||||
role.toString(),
|
||||
byAddress.toHexString(),
|
||||
toAddress.toHexString(),
|
||||
]);
|
||||
// revoked
|
||||
const owner = Owner.load(toAddress);
|
||||
if (!owner) {
|
||||
log.error(
|
||||
'Owner entity not found. Role: {}, byAddress: {}, toAddress: {}',
|
||||
[role.toString(), byAddress.toHexString(), toAddress.toHexString()]
|
||||
);
|
||||
return;
|
||||
}
|
||||
owner.collection = false;
|
||||
owner.save();
|
||||
}
|
||||
} else {
|
||||
log.error('Role not supported. Role: {}, byAddress: {}, toAddress: {}', [
|
||||
role.toString(),
|
||||
byAddress.toHexString(),
|
||||
toAddress.toHexString(),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
export function handleTokenRoleChanged(event: TokenRoleChangedEvent): void {
|
||||
let tokenId = event.params.tokenId;
|
||||
let toAddress = event.params.toAddress;
|
||||
let byAddress = event.params.byAddress;
|
||||
let role = event.params.role;
|
||||
let status = event.params.status;
|
||||
const tokenId = event.params.tokenId;
|
||||
const toAddress = event.params.toAddress;
|
||||
const byAddress = event.params.byAddress;
|
||||
const role = event.params.role;
|
||||
const status = event.params.status;
|
||||
|
||||
// load token
|
||||
let token = Token.load(Bytes.fromByteArray(Bytes.fromBigInt(tokenId)));
|
||||
if (!token) {
|
||||
log.error('Token not found. TokenId: {}', [tokenId.toString()]);
|
||||
return;
|
||||
// load token
|
||||
const token = Token.load(Bytes.fromByteArray(Bytes.fromBigInt(tokenId)));
|
||||
if (!token) {
|
||||
log.error('Token not found. TokenId: {}', [tokenId.toString()]);
|
||||
return;
|
||||
}
|
||||
|
||||
if (role === TokenRoles.Controller) {
|
||||
// Controller role
|
||||
// get the list of controllers.
|
||||
let token_controllers = token.controllers;
|
||||
if (!token_controllers) {
|
||||
token_controllers = [];
|
||||
}
|
||||
|
||||
if (role === TokenRoles.Controller) {
|
||||
// Controller role
|
||||
// get the list of controllers.
|
||||
let token_controllers = token.controllers;
|
||||
if (!token_controllers) {
|
||||
token_controllers = [];
|
||||
}
|
||||
if (status) {
|
||||
// granted
|
||||
token_controllers.push(toAddress);
|
||||
} else {
|
||||
// revoked
|
||||
// remove address from the controllers list
|
||||
const index = token_controllers.indexOf(event.params.toAddress, 0);
|
||||
if (index > -1) {
|
||||
token_controllers.splice(index, 1);
|
||||
}
|
||||
}
|
||||
token.controllers = token_controllers;
|
||||
if (status) {
|
||||
// granted
|
||||
token_controllers.push(toAddress);
|
||||
} else {
|
||||
log.error('Role not supported. Role: {}, byAddress: {}, toAddress: {}', [
|
||||
role.toString(),
|
||||
byAddress.toHexString(),
|
||||
toAddress.toHexString(),
|
||||
]);
|
||||
// revoked
|
||||
// remove address from the controllers list
|
||||
const index = token_controllers.indexOf(event.params.toAddress, 0);
|
||||
if (index > -1) {
|
||||
token_controllers.splice(index, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
token.controllers = token_controllers;
|
||||
} else {
|
||||
log.error('Role not supported. Role: {}, byAddress: {}, toAddress: {}', [
|
||||
role.toString(),
|
||||
byAddress.toHexString(),
|
||||
toAddress.toHexString(),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,26 +1,16 @@
|
|||
import {
|
||||
Address,
|
||||
Bytes,
|
||||
log,
|
||||
store,
|
||||
ethereum,
|
||||
BigInt,
|
||||
} from '@graphprotocol/graph-ts';
|
||||
import { Bytes, log, BigInt } from '@graphprotocol/graph-ts';
|
||||
|
||||
// Event Imports [based on the yaml config]
|
||||
import {
|
||||
ChangeAccessPointCreationStatus as ChangeAccessPointCreationStatusEvent,
|
||||
ChangeAccessPointScore as ChangeAccessPointCreationScoreEvent,
|
||||
NewAccessPoint as NewAccessPointEvent,
|
||||
ChangeAccessPointNameVerify as ChangeAccessPointNameVerifyEvent,
|
||||
ChangeAccessPointContentVerify as ChangeAccessPointContentVerifyEvent,
|
||||
ChangeAccessPointCreationStatus as ChangeAccessPointCreationStatusEvent,
|
||||
ChangeAccessPointScore as ChangeAccessPointCreationScoreEvent,
|
||||
NewAccessPoint as NewAccessPointEvent,
|
||||
ChangeAccessPointNameVerify as ChangeAccessPointNameVerifyEvent,
|
||||
ChangeAccessPointContentVerify as ChangeAccessPointContentVerifyEvent,
|
||||
} from '../generated/FleekNFA/FleekNFA';
|
||||
|
||||
// Entity Imports [based on the schema]
|
||||
import {
|
||||
AccessPoint,
|
||||
Owner,
|
||||
} from '../generated/schema';
|
||||
import { AccessPoint, Owner } from '../generated/schema';
|
||||
|
||||
/**
|
||||
* This handler will create and load entities in the following order:
|
||||
|
|
@ -29,133 +19,133 @@ import {
|
|||
* Note to discuss later: Should a `NewAccessPoint` entity be also created and defined?
|
||||
*/
|
||||
export function handleNewAccessPoint(event: NewAccessPointEvent): void {
|
||||
// Create an AccessPoint entity
|
||||
let accessPointEntity = new AccessPoint(event.params.apName);
|
||||
accessPointEntity.score = BigInt.fromU32(0);
|
||||
accessPointEntity.contentVerified = false;
|
||||
accessPointEntity.nameVerified = false;
|
||||
accessPointEntity.creationStatus = 'DRAFT'; // Since a `ChangeAccessPointCreationStatus` event is emitted instantly after `NewAccessPoint`, the status will be updated in that handler.
|
||||
accessPointEntity.owner = event.params.owner;
|
||||
accessPointEntity.token = Bytes.fromByteArray(
|
||||
Bytes.fromBigInt(event.params.tokenId)
|
||||
);
|
||||
// Create an AccessPoint entity
|
||||
const accessPointEntity = new AccessPoint(event.params.apName);
|
||||
accessPointEntity.score = BigInt.fromU32(0);
|
||||
accessPointEntity.contentVerified = false;
|
||||
accessPointEntity.nameVerified = false;
|
||||
accessPointEntity.creationStatus = 'DRAFT'; // Since a `ChangeAccessPointCreationStatus` event is emitted instantly after `NewAccessPoint`, the status will be updated in that handler.
|
||||
accessPointEntity.owner = event.params.owner;
|
||||
accessPointEntity.token = Bytes.fromByteArray(
|
||||
Bytes.fromBigInt(event.params.tokenId)
|
||||
);
|
||||
|
||||
// Load / Create an Owner entity
|
||||
let ownerEntity = Owner.load(event.params.owner);
|
||||
// Load / Create an Owner entity
|
||||
let ownerEntity = Owner.load(event.params.owner);
|
||||
|
||||
if (!ownerEntity) {
|
||||
// Create a new owner entity
|
||||
ownerEntity = new Owner(event.params.owner);
|
||||
// Since no CollectionRoleChanged event was emitted before for this address, we can set `collection` to false.
|
||||
ownerEntity.collection = false;
|
||||
}
|
||||
if (!ownerEntity) {
|
||||
// Create a new owner entity
|
||||
ownerEntity = new Owner(event.params.owner);
|
||||
// Since no CollectionRoleChanged event was emitted before for this address, we can set `collection` to false.
|
||||
ownerEntity.collection = false;
|
||||
}
|
||||
|
||||
// Save entities.
|
||||
accessPointEntity.save();
|
||||
ownerEntity.save();
|
||||
// Save entities.
|
||||
accessPointEntity.save();
|
||||
ownerEntity.save();
|
||||
}
|
||||
|
||||
/**
|
||||
* This handler will update the status of an access point entity.
|
||||
*/
|
||||
export function handleChangeAccessPointCreationStatus(
|
||||
event: ChangeAccessPointCreationStatusEvent
|
||||
event: ChangeAccessPointCreationStatusEvent
|
||||
): void {
|
||||
// Load the AccessPoint entity
|
||||
let accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
let status = event.params.status;
|
||||
// Load the AccessPoint entity
|
||||
const accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
const status = event.params.status;
|
||||
|
||||
if (accessPointEntity) {
|
||||
switch (status) {
|
||||
case 0:
|
||||
accessPointEntity.creationStatus = 'DRAFT';
|
||||
break;
|
||||
case 1:
|
||||
accessPointEntity.creationStatus = 'APPROVED';
|
||||
break;
|
||||
case 2:
|
||||
accessPointEntity.creationStatus = 'REJECTED';
|
||||
break;
|
||||
case 3:
|
||||
accessPointEntity.creationStatus = 'REMOVED';
|
||||
break;
|
||||
default:
|
||||
// Unknown status
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointCreationStatus. Unknown status. Status: {}, AccessPoint: {}',
|
||||
[status.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
if (accessPointEntity) {
|
||||
switch (status) {
|
||||
case 0:
|
||||
accessPointEntity.creationStatus = 'DRAFT';
|
||||
break;
|
||||
case 1:
|
||||
accessPointEntity.creationStatus = 'APPROVED';
|
||||
break;
|
||||
case 2:
|
||||
accessPointEntity.creationStatus = 'REJECTED';
|
||||
break;
|
||||
case 3:
|
||||
accessPointEntity.creationStatus = 'REMOVED';
|
||||
break;
|
||||
default:
|
||||
// Unknown status
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointCreationStatus. Unknown status. Status: {}, AccessPoint: {}',
|
||||
[status.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointCreationStatus. Unknown access point. Status: {}, AccessPoint: {}',
|
||||
[status.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointCreationStatus. Unknown access point. Status: {}, AccessPoint: {}',
|
||||
[status.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This handler will update the score of an access point entity.
|
||||
*/
|
||||
export function handleChangeAccessPointScore(
|
||||
event: ChangeAccessPointCreationScoreEvent
|
||||
event: ChangeAccessPointCreationScoreEvent
|
||||
): void {
|
||||
// Load the AccessPoint entity
|
||||
let accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
// Load the AccessPoint entity
|
||||
const accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
|
||||
if (accessPointEntity) {
|
||||
accessPointEntity.score = event.params.score;
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointScore. Unknown access point. Score: {}, AccessPoint: {}',
|
||||
[event.params.score.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
if (accessPointEntity) {
|
||||
accessPointEntity.score = event.params.score;
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointScore. Unknown access point. Score: {}, AccessPoint: {}',
|
||||
[event.params.score.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This handler will update the nameVerified field of an access point entity.
|
||||
*/
|
||||
export function handleChangeAccessPointNameVerify(
|
||||
event: ChangeAccessPointNameVerifyEvent
|
||||
event: ChangeAccessPointNameVerifyEvent
|
||||
): void {
|
||||
// Load the AccessPoint entity
|
||||
let accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
// Load the AccessPoint entity
|
||||
const accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
|
||||
if (accessPointEntity) {
|
||||
accessPointEntity.nameVerified = event.params.verified;
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointNameVerify. Unknown access point. Verified: {}, AccessPoint: {}',
|
||||
[event.params.verified.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
if (accessPointEntity) {
|
||||
accessPointEntity.nameVerified = event.params.verified;
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointNameVerify. Unknown access point. Verified: {}, AccessPoint: {}',
|
||||
[event.params.verified.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This handler will update the contentVerified field of an access point entity.
|
||||
*/
|
||||
export function handleChangeAccessPointContentVerify(
|
||||
event: ChangeAccessPointContentVerifyEvent
|
||||
event: ChangeAccessPointContentVerifyEvent
|
||||
): void {
|
||||
// Load the AccessPoint entity
|
||||
let accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
// Load the AccessPoint entity
|
||||
const accessPointEntity = AccessPoint.load(event.params.apName);
|
||||
|
||||
if (accessPointEntity) {
|
||||
accessPointEntity.contentVerified = event.params.verified;
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointContentVerify. Unknown access point. Verified: {}, AccessPoint: {}',
|
||||
[event.params.verified.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
if (accessPointEntity) {
|
||||
accessPointEntity.contentVerified = event.params.verified;
|
||||
accessPointEntity.save();
|
||||
} else {
|
||||
// Unknown access point
|
||||
log.error(
|
||||
'Unable to handle ChangeAccessPointContentVerify. Unknown access point. Verified: {}, AccessPoint: {}',
|
||||
[event.params.verified.toString(), event.params.apName]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,41 +1,38 @@
|
|||
// Event Imports [based on the yaml config]
|
||||
import {
|
||||
Approval as ApprovalEvent,
|
||||
ApprovalForAll as ApprovalForAllEvent,
|
||||
Approval as ApprovalEvent,
|
||||
ApprovalForAll as ApprovalForAllEvent,
|
||||
} from '../generated/FleekNFA/FleekNFA';
|
||||
|
||||
// Entity Imports [based on the schema]
|
||||
import {
|
||||
Approval,
|
||||
ApprovalForAll,
|
||||
} from '../generated/schema';
|
||||
import { Approval, ApprovalForAll } from '../generated/schema';
|
||||
|
||||
export function handleApproval(event: ApprovalEvent): void {
|
||||
let entity = new Approval(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
entity.owner = event.params.owner;
|
||||
entity.approved = event.params.approved;
|
||||
entity.tokenId = event.params.tokenId;
|
||||
const entity = new Approval(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
entity.owner = event.params.owner;
|
||||
entity.approved = event.params.approved;
|
||||
entity.tokenId = event.params.tokenId;
|
||||
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
|
||||
entity.save();
|
||||
entity.save();
|
||||
}
|
||||
|
||||
export function handleApprovalForAll(event: ApprovalForAllEvent): void {
|
||||
let entity = new ApprovalForAll(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
entity.owner = event.params.owner;
|
||||
entity.operator = event.params.operator;
|
||||
entity.approved = event.params.approved;
|
||||
const entity = new ApprovalForAll(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
entity.owner = event.params.owner;
|
||||
entity.operator = event.params.operator;
|
||||
entity.approved = event.params.approved;
|
||||
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
|
||||
entity.save();
|
||||
}
|
||||
entity.save();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,29 +1,22 @@
|
|||
import {
|
||||
log,
|
||||
ethereum,
|
||||
} from '@graphprotocol/graph-ts';
|
||||
import { log, ethereum } from '@graphprotocol/graph-ts';
|
||||
|
||||
// Event Imports [based on the yaml config]
|
||||
import {
|
||||
Initialized as InitializedEvent,
|
||||
} from '../generated/FleekNFA/FleekNFA';
|
||||
import { Initialized as InitializedEvent } from '../generated/FleekNFA/FleekNFA';
|
||||
|
||||
// Entity Imports [based on the schema]
|
||||
import {
|
||||
Owner,
|
||||
} from '../generated/schema';
|
||||
import { Owner } from '../generated/schema';
|
||||
export function handleInitialized(event: InitializedEvent): void {
|
||||
// This is the contract creation transaction.
|
||||
log.warning('This is the contract creation transaction.', []);
|
||||
if (event.receipt) {
|
||||
let receipt = event.receipt as ethereum.TransactionReceipt;
|
||||
log.warning('Contract address is: {}', [
|
||||
receipt.contractAddress.toHexString(),
|
||||
]);
|
||||
// This is the contract creation transaction.
|
||||
log.warning('This is the contract creation transaction.', []);
|
||||
if (event.receipt) {
|
||||
const receipt = event.receipt as ethereum.TransactionReceipt;
|
||||
log.warning('Contract address is: {}', [
|
||||
receipt.contractAddress.toHexString(),
|
||||
]);
|
||||
|
||||
// add owner
|
||||
let owner = new Owner(event.transaction.from);
|
||||
owner.collection = true;
|
||||
owner.save();
|
||||
}
|
||||
}
|
||||
// add owner
|
||||
const owner = new Owner(event.transaction.from);
|
||||
owner.collection = true;
|
||||
owner.save();
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
export * from './access-control';
|
||||
export * from './access-point';
|
||||
export * from './access-point';
|
||||
export * from './approval';
|
||||
export * from './contract';
|
||||
export * from './metadata-update';
|
||||
|
|
|
|||
|
|
@ -2,163 +2,163 @@ import { Bytes } from '@graphprotocol/graph-ts';
|
|||
|
||||
// Event Imports [based on the yaml config]
|
||||
import {
|
||||
MetadataUpdate as MetadataUpdateEvent,
|
||||
MetadataUpdate1 as MetadataUpdateEvent1,
|
||||
MetadataUpdate2 as MetadataUpdateEvent2,
|
||||
MetadataUpdate3 as MetadataUpdateEvent3,
|
||||
MetadataUpdate4 as MetadataUpdateEvent4,
|
||||
MetadataUpdate as MetadataUpdateEvent,
|
||||
MetadataUpdate1 as MetadataUpdateEvent1,
|
||||
MetadataUpdate2 as MetadataUpdateEvent2,
|
||||
MetadataUpdate3 as MetadataUpdateEvent3,
|
||||
MetadataUpdate4 as MetadataUpdateEvent4,
|
||||
} from '../generated/FleekNFA/FleekNFA';
|
||||
|
||||
// Entity Imports [based on the schema]
|
||||
import {
|
||||
GitRepository as GitRepositoryEntity,
|
||||
MetadataUpdate,
|
||||
Token,
|
||||
GitRepository as GitRepositoryEntity,
|
||||
MetadataUpdate,
|
||||
Token,
|
||||
} from '../generated/schema';
|
||||
|
||||
export function handleMetadataUpdateWithStringValue(
|
||||
event: MetadataUpdateEvent1
|
||||
event: MetadataUpdateEvent1
|
||||
): void {
|
||||
/**
|
||||
* Metadata handled here:
|
||||
* setTokenExternalURL
|
||||
* setTokenENS
|
||||
* setTokenName
|
||||
* setTokenDescription
|
||||
* setTokenLogo
|
||||
* */
|
||||
let entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
/**
|
||||
* Metadata handled here:
|
||||
* setTokenExternalURL
|
||||
* setTokenENS
|
||||
* setTokenName
|
||||
* setTokenDescription
|
||||
* setTokenLogo
|
||||
* */
|
||||
const entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.key = event.params.key;
|
||||
entity.stringValue = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.key = event.params.key;
|
||||
entity.stringValue = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
|
||||
entity.save();
|
||||
entity.save();
|
||||
|
||||
// UPDATE TOKEN
|
||||
let token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
// UPDATE TOKEN
|
||||
const token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
|
||||
if (token) {
|
||||
if (event.params.key == 'externalURL') {
|
||||
token.externalURL = event.params.value;
|
||||
} else if (event.params.key == 'ENS') {
|
||||
token.ENS = event.params.value;
|
||||
} else if (event.params.key == 'name') {
|
||||
token.name = event.params.value;
|
||||
} else if (event.params.key == 'description') {
|
||||
token.description = event.params.value;
|
||||
} else {
|
||||
// logo
|
||||
token.logo = event.params.value;
|
||||
}
|
||||
token.save();
|
||||
if (token) {
|
||||
if (event.params.key == 'externalURL') {
|
||||
token.externalURL = event.params.value;
|
||||
} else if (event.params.key == 'ENS') {
|
||||
token.ENS = event.params.value;
|
||||
} else if (event.params.key == 'name') {
|
||||
token.name = event.params.value;
|
||||
} else if (event.params.key == 'description') {
|
||||
token.description = event.params.value;
|
||||
} else {
|
||||
// logo
|
||||
token.logo = event.params.value;
|
||||
}
|
||||
token.save();
|
||||
}
|
||||
}
|
||||
|
||||
export function handleMetadataUpdateWithDoubleStringValue(
|
||||
event: MetadataUpdateEvent3
|
||||
event: MetadataUpdateEvent3
|
||||
): void {
|
||||
/**
|
||||
* setTokenBuild
|
||||
*/
|
||||
let entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
/**
|
||||
* setTokenBuild
|
||||
*/
|
||||
const entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
|
||||
entity.key = event.params.key;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.doubleStringValue = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
entity.key = event.params.key;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.doubleStringValue = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
|
||||
entity.save();
|
||||
entity.save();
|
||||
|
||||
// UPDATE TOKEN
|
||||
let token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
// UPDATE TOKEN
|
||||
const token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
|
||||
if (token) {
|
||||
if (event.params.key == 'build') {
|
||||
let gitRepositoryEntity = GitRepositoryEntity.load(event.params.value[1]);
|
||||
if (!gitRepositoryEntity) {
|
||||
// Create a new gitRepository entity
|
||||
gitRepositoryEntity = new GitRepositoryEntity(event.params.value[1]);
|
||||
}
|
||||
token.commitHash = event.params.value[0];
|
||||
token.gitRepository = event.params.value[1];
|
||||
token.save();
|
||||
gitRepositoryEntity.save();
|
||||
}
|
||||
if (token) {
|
||||
if (event.params.key == 'build') {
|
||||
let gitRepositoryEntity = GitRepositoryEntity.load(event.params.value[1]);
|
||||
if (!gitRepositoryEntity) {
|
||||
// Create a new gitRepository entity
|
||||
gitRepositoryEntity = new GitRepositoryEntity(event.params.value[1]);
|
||||
}
|
||||
token.commitHash = event.params.value[0];
|
||||
token.gitRepository = event.params.value[1];
|
||||
token.save();
|
||||
gitRepositoryEntity.save();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function handleMetadataUpdateWithIntValue(
|
||||
event: MetadataUpdateEvent2
|
||||
event: MetadataUpdateEvent2
|
||||
): void {
|
||||
/**
|
||||
* setTokenColor
|
||||
*/
|
||||
let entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
/**
|
||||
* setTokenColor
|
||||
*/
|
||||
const entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
|
||||
entity.key = event.params.key;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.uint24Value = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
entity.key = event.params.key;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.uint24Value = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
|
||||
entity.save();
|
||||
entity.save();
|
||||
|
||||
let token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
const token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
|
||||
if (token) {
|
||||
if (event.params.key == 'color') {
|
||||
token.color = event.params.value;
|
||||
}
|
||||
token.save();
|
||||
if (token) {
|
||||
if (event.params.key == 'color') {
|
||||
token.color = event.params.value;
|
||||
}
|
||||
token.save();
|
||||
}
|
||||
}
|
||||
|
||||
export function handleMetadataUpdateWithBooleanValue(
|
||||
event: MetadataUpdateEvent4
|
||||
event: MetadataUpdateEvent4
|
||||
): void {
|
||||
/**
|
||||
* accessPointAutoApproval
|
||||
*/
|
||||
let entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
/**
|
||||
* accessPointAutoApproval
|
||||
*/
|
||||
const entity = new MetadataUpdate(
|
||||
event.transaction.hash.concatI32(event.logIndex.toI32())
|
||||
);
|
||||
|
||||
entity.key = event.params.key;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.booleanValue = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
entity.key = event.params.key;
|
||||
entity.tokenId = event.params._tokenId;
|
||||
entity.booleanValue = event.params.value;
|
||||
entity.blockNumber = event.block.number;
|
||||
entity.blockTimestamp = event.block.timestamp;
|
||||
entity.transactionHash = event.transaction.hash;
|
||||
|
||||
entity.save();
|
||||
entity.save();
|
||||
|
||||
let token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
const token = Token.load(
|
||||
Bytes.fromByteArray(Bytes.fromBigInt(event.params._tokenId))
|
||||
);
|
||||
|
||||
if (token) {
|
||||
if (event.params.key == 'accessPointAutoApproval') {
|
||||
token.accessPointAutoApproval = event.params.value;
|
||||
}
|
||||
token.save();
|
||||
if (token) {
|
||||
if (event.params.key == 'accessPointAutoApproval') {
|
||||
token.accessPointAutoApproval = event.params.value;
|
||||
}
|
||||
}
|
||||
token.save();
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,90 +1,80 @@
|
|||
import {
|
||||
Bytes,
|
||||
log,
|
||||
} from '@graphprotocol/graph-ts';
|
||||
import { Bytes, log } from '@graphprotocol/graph-ts';
|
||||
|
||||
// Event Imports [based on the yaml config]
|
||||
import {
|
||||
NewMint as NewMintEvent,
|
||||
} from '../generated/FleekNFA/FleekNFA';
|
||||
import { NewMint as NewMintEvent } from '../generated/FleekNFA/FleekNFA';
|
||||
|
||||
// Entity Imports [based on the schema]
|
||||
import {
|
||||
Owner,
|
||||
GitRepository as GitRepositoryEntity,
|
||||
NewMint,
|
||||
Token,
|
||||
-} from '../generated/schema';
+import { Owner, NewMint, Token } from '../generated/schema';
 
 export function handleNewMint(event: NewMintEvent): void {
-  let newMintEntity = new NewMint(
+  const newMintEntity = new NewMint(
     event.transaction.hash.concatI32(event.logIndex.toI32())
   );
 
-  let name = event.params.name;
-  let description = event.params.description;
-  let externalURL = event.params.externalURL;
-  let ENS = event.params.ENS;
-  let gitRepository = event.params.gitRepository;
-  let commitHash = event.params.commitHash;
-  let logo = event.params.logo;
-  let color = event.params.color;
-  let accessPointAutoApproval = event.params.accessPointAutoApproval;
-  let tokenId = event.params.tokenId;
-  let ownerAddress = event.params.owner;
-  let verifierAddress = event.params.verifier;
+  const name = event.params.name;
+  const description = event.params.description;
+  const externalURL = event.params.externalURL;
+  const ENS = event.params.ENS;
+  const gitRepository = event.params.gitRepository;
+  const commitHash = event.params.commitHash;
+  const logo = event.params.logo;
+  const color = event.params.color;
+  const accessPointAutoApproval = event.params.accessPointAutoApproval;
+  const tokenId = event.params.tokenId;
+  const ownerAddress = event.params.owner;
+  const verifierAddress = event.params.verifier;
 
   newMintEntity.tokenId = tokenId;
   newMintEntity.name = name;
   newMintEntity.description = description;
   newMintEntity.externalURL = externalURL;
   newMintEntity.ENS = ENS;
   newMintEntity.commitHash = commitHash;
   newMintEntity.gitRepository = gitRepository;
   newMintEntity.logo = logo;
   newMintEntity.color = color;
   newMintEntity.accessPointAutoApproval = accessPointAutoApproval;
   newMintEntity.triggeredBy = event.params.minter;
   newMintEntity.owner = ownerAddress;
   newMintEntity.verifier = verifierAddress;
   newMintEntity.blockNumber = event.block.number;
   newMintEntity.blockTimestamp = event.block.timestamp;
   newMintEntity.transactionHash = event.transaction.hash;
   newMintEntity.save();
   log.error('{}', [tokenId.toString()]);
 
   // Create Token, Owner, and Controller entities
   let owner = Owner.load(ownerAddress);
-  let token = new Token(Bytes.fromByteArray(Bytes.fromBigInt(tokenId)));
+  const token = new Token(Bytes.fromByteArray(Bytes.fromBigInt(tokenId)));
 
   if (!owner) {
     // Create a new owner entity
     owner = new Owner(ownerAddress);
     // Since no CollectionRoleChanged event was emitted before for this address, we can set `collection` to false.
     owner.collection = false;
   }
 
   // Populate Token with data from the event
   token.tokenId = tokenId;
   token.name = name;
   token.description = description;
   token.externalURL = externalURL;
   token.ENS = ENS;
   token.gitRepository = gitRepository;
   token.commitHash = commitHash;
   token.logo = logo;
   token.color = color;
   token.accessPointAutoApproval = accessPointAutoApproval;
   token.owner = ownerAddress;
   token.verifier = verifierAddress;
   token.mintTransaction = event.transaction.hash.concatI32(
     event.logIndex.toI32()
   );
   token.mintedBy = event.params.minter;
   token.controllers = [ownerAddress];
 
   // Save entities
   owner.save();
   token.save();
 }

@@ -1,73 +1,60 @@
-import {
-  Bytes,
-  log,
-  store
-} from '@graphprotocol/graph-ts';
+import { Bytes, log, store } from '@graphprotocol/graph-ts';
 
 // Event Imports [based on the yaml config]
-import {
-  Transfer as TransferEvent,
-} from '../generated/FleekNFA/FleekNFA';
+import { Transfer as TransferEvent } from '../generated/FleekNFA/FleekNFA';
 
 // Entity Imports [based on the schema]
-import {
-  Owner,
-  Token,
-  Transfer,
-} from '../generated/schema';
+import { Owner, Token, Transfer } from '../generated/schema';
 
 export function handleTransfer(event: TransferEvent): void {
-  let transfer = new Transfer(
+  const transfer = new Transfer(
     event.transaction.hash.concatI32(event.logIndex.toI32())
   );
 
   const TokenId = event.params.tokenId;
 
   transfer.from = event.params.from;
   transfer.to = event.params.to;
   transfer.tokenId = TokenId;
 
   transfer.blockNumber = event.block.number;
   transfer.blockTimestamp = event.block.timestamp;
   transfer.transactionHash = event.transaction.hash;
 
   transfer.save();
 
   let token: Token | null;
 
-  let owner_address = event.params.to;
+  const owner_address = event.params.to;
   let owner = Owner.load(owner_address);
 
   if (!owner) {
     // Create a new owner entity
     owner = new Owner(owner_address);
   }
 
   if (parseInt(event.params.from.toHexString()) !== 0) {
     if (parseInt(event.params.to.toHexString()) === 0) {
       // Burn
       // Remove the entity from storage
       // Its controllers and owner will be affected.
       store.remove('Token', TokenId.toString());
     } else {
       // Transfer
       // Load the Token by using its TokenId
-      token = Token.load(Bytes.fromByteArray(Bytes.fromBigInt(TokenId)));
+      token = Token.load(
+        Bytes.fromByteArray(Bytes.fromBigInt(TokenId))
+      );
 
       if (token) {
         // Entity exists
         token.owner = owner_address;
 
         // Save both entities
         owner.save();
         token.save();
       } else {
         // Entity does not exist
         log.error('Unknown token was transferred.', []);
       }
     }
   }
 }

@@ -3,11 +3,11 @@ schema:
 dataSources:
   - kind: ethereum
     name: FleekNFA
-    network: mumbai
+    network: goerli
     source:
-      address: "0x550Ee47Fa9E0B81c1b9C394FeE62Fe699a955519" # <- Proxy Contract | Current implementation contract: 0x9e4a318c788e0097a24146fe7041a0cd93939d56
+      address: "0x8795608346Eb475E42e69F1281008AEAa522479D" # <- Proxy Contract
       abi: FleekNFA
-      startBlock: 32373064
+      startBlock: 8671990
     mapping:
       kind: ethereum/events
       apiVersion: 0.0.7

@@ -32,7 +32,7 @@ dataSources:
         - ChangeAccessPointAutoApproval
       abis:
         - name: FleekNFA
-          file: ../contracts/artifacts/contracts/FleekERC721.sol/FleekERC721.json
+          file: ../contracts/deployments/goerli/FleekERC721.json
       eventHandlers:
        - event: Approval(indexed address,indexed address,indexed uint256)
          handler: handleApproval

@@ -1,4 +0,0 @@
-{
-  "version": "0.5.4",
-  "timestamp": 1679061942846
-}

@ -1,115 +0,0 @@
|
|||
import {
|
||||
assert,
|
||||
describe,
|
||||
test,
|
||||
clearStore,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
} from 'matchstick-as/assembly/index';
|
||||
import { BigInt, Bytes } from '@graphprotocol/graph-ts';
|
||||
import {
|
||||
createNewAccessPointEvent,
|
||||
createNewChangeAccessPointCreationStatus,
|
||||
handleChangeAccessPointCreationStatusList,
|
||||
handleNewAccessPoints,
|
||||
makeEventId,
|
||||
USER_ONE,
|
||||
USER_TWO,
|
||||
} from '../helpers/utils';
|
||||
import {
|
||||
ChangeAccessPointCreationStatus,
|
||||
NewAccessPoint,
|
||||
} from '../../../generated/FleekNFA/FleekNFA';
|
||||
|
||||
describe('Change Access Point Creation Status tests', () => {
|
||||
beforeAll(() => {
|
||||
// New Access Points
|
||||
let newAccessPoints: NewAccessPoint[] = [];
|
||||
|
||||
// User One has two access points: one for tokenId 0 and one for tokenId 1
|
||||
newAccessPoints.push(
|
||||
createNewAccessPointEvent(0, 'firstAP', BigInt.fromI32(0), USER_ONE)
|
||||
);
|
||||
newAccessPoints.push(
|
||||
createNewAccessPointEvent(1, 'secondAP', BigInt.fromI32(1), USER_ONE)
|
||||
);
|
||||
|
||||
// User Two has one access point for tokenId 0
|
||||
newAccessPoints.push(
|
||||
createNewAccessPointEvent(2, 'thirdAP', BigInt.fromI32(0), USER_TWO)
|
||||
);
|
||||
handleNewAccessPoints(newAccessPoints);
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
clearStore();
|
||||
});
|
||||
|
||||
describe('Assertions', () => {
|
||||
test('Check the `creationStatus` field of each access point entity', () => {
|
||||
assert.fieldEquals('AccessPoint', 'firstAP', 'creationStatus', 'DRAFT');
|
||||
assert.fieldEquals('AccessPoint', 'secondAP', 'creationStatus', 'DRAFT');
|
||||
assert.fieldEquals('AccessPoint', 'thirdAP', 'creationStatus', 'DRAFT');
|
||||
});
|
||||
|
||||
test('Check the `creationStatus` field of each access point entity after changing it', () => {
|
||||
// New Access Points
|
||||
let changeAccessPointCreationStatusList: ChangeAccessPointCreationStatus[] =
|
||||
[];
|
||||
|
||||
// User One has two access points: one for tokenId 0 and one for tokenId 1
|
||||
changeAccessPointCreationStatusList.push(
|
||||
createNewChangeAccessPointCreationStatus(
|
||||
0,
|
||||
'firstAP',
|
||||
BigInt.fromI32(0),
|
||||
1,
|
||||
USER_ONE
|
||||
)
|
||||
);
|
||||
changeAccessPointCreationStatusList.push(
|
||||
createNewChangeAccessPointCreationStatus(
|
||||
0,
|
||||
'secondAP',
|
||||
BigInt.fromI32(1),
|
||||
1,
|
||||
USER_ONE
|
||||
)
|
||||
);
|
||||
|
||||
// User Two has one access point for tokenId 0
|
||||
changeAccessPointCreationStatusList.push(
|
||||
createNewChangeAccessPointCreationStatus(
|
||||
0,
|
||||
'thirdAP',
|
||||
BigInt.fromI32(0),
|
||||
1,
|
||||
USER_TWO
|
||||
)
|
||||
);
|
||||
|
||||
handleChangeAccessPointCreationStatusList(
|
||||
changeAccessPointCreationStatusList
|
||||
);
|
||||
|
||||
assert.fieldEquals(
|
||||
'AccessPoint',
|
||||
'firstAP',
|
||||
'creationStatus',
|
||||
'APPROVED'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'AccessPoint',
|
||||
'secondAP',
|
||||
'creationStatus',
|
||||
'APPROVED'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'AccessPoint',
|
||||
'thirdAP',
|
||||
'creationStatus',
|
||||
'APPROVED'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,94 +0,0 @@
|
|||
import {
|
||||
assert,
|
||||
describe,
|
||||
test,
|
||||
clearStore,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
} from 'matchstick-as/assembly/index';
|
||||
import { BigInt } from '@graphprotocol/graph-ts';
|
||||
import {
|
||||
createNewAccessPointEvent,
|
||||
createNewChangeAccessPointNameVerify,
|
||||
handleChangeAccessPointNameVerifies,
|
||||
handleNewAccessPoints,
|
||||
USER_ONE,
|
||||
USER_TWO,
|
||||
} from '../helpers/utils';
|
||||
import {
|
||||
ChangeAccessPointNameVerify,
|
||||
NewAccessPoint,
|
||||
} from '../../../generated/FleekNFA/FleekNFA';
|
||||
|
||||
describe('Change Access Point Name Verify tests', () => {
|
||||
beforeAll(() => {
|
||||
// New Access Points
|
||||
let newAccessPoints: NewAccessPoint[] = [];
|
||||
|
||||
// User One has two access points: one for tokenId 0 and one for tokenId 1
|
||||
newAccessPoints.push(
|
||||
createNewAccessPointEvent(0, 'firstAP', BigInt.fromI32(0), USER_ONE)
|
||||
);
|
||||
newAccessPoints.push(
|
||||
createNewAccessPointEvent(1, 'secondAP', BigInt.fromI32(1), USER_ONE)
|
||||
);
|
||||
|
||||
// User Two has one access point for tokenId 0
|
||||
newAccessPoints.push(
|
||||
createNewAccessPointEvent(2, 'thirdAP', BigInt.fromI32(0), USER_TWO)
|
||||
);
|
||||
handleNewAccessPoints(newAccessPoints);
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
clearStore();
|
||||
});
|
||||
|
||||
describe('Assertions', () => {
|
||||
test('Check the `nameVerified` field of each access point entity', () => {
|
||||
assert.fieldEquals('AccessPoint', 'firstAP', 'nameVerified', 'false');
|
||||
assert.fieldEquals('AccessPoint', 'secondAP', 'nameVerified', 'false');
|
||||
assert.fieldEquals('AccessPoint', 'thirdAP', 'nameVerified', 'false');
|
||||
});
|
||||
|
||||
test('Check the `nameVerified` field of each access point entity after changing it', () => {
|
||||
// New Access Point Name Verified fields
|
||||
let changeAccessPointNameVerifies: ChangeAccessPointNameVerify[] = [];
|
||||
|
||||
changeAccessPointNameVerifies.push(
|
||||
createNewChangeAccessPointNameVerify(
|
||||
0,
|
||||
'firstAP',
|
||||
BigInt.fromI32(0),
|
||||
true,
|
||||
USER_ONE
|
||||
)
|
||||
);
|
||||
changeAccessPointNameVerifies.push(
|
||||
createNewChangeAccessPointNameVerify(
|
||||
0,
|
||||
'secondAP',
|
||||
BigInt.fromI32(1),
|
||||
true,
|
||||
USER_ONE
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointNameVerifies.push(
|
||||
createNewChangeAccessPointNameVerify(
|
||||
0,
|
||||
'thirdAP',
|
||||
BigInt.fromI32(0),
|
||||
true,
|
||||
USER_TWO
|
||||
)
|
||||
);
|
||||
|
||||
handleChangeAccessPointNameVerifies(changeAccessPointNameVerifies);
|
||||
|
||||
assert.fieldEquals('AccessPoint', 'firstAP', 'nameVerified', 'true');
|
||||
assert.fieldEquals('AccessPoint', 'secondAP', 'nameVerified', 'true');
|
||||
assert.fieldEquals('AccessPoint', 'thirdAP', 'nameVerified', 'true');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,447 +0,0 @@
|
|||
import { newMockEvent } from 'matchstick-as';
|
||||
import { ethereum, Address, BigInt, Bytes } from '@graphprotocol/graph-ts';
|
||||
import {
|
||||
Approval as ApprovalEvent,
|
||||
ApprovalForAll as ApprovalForAllEvent,
|
||||
Transfer as TransferEvent,
|
||||
NewMint as NewMintEvent,
|
||||
NewAccessPoint,
|
||||
ChangeAccessPointCreationStatus,
|
||||
ChangeAccessPointNameVerify,
|
||||
TokenRoleChanged,
|
||||
CollectionRoleChanged,
|
||||
} from '../../../generated/FleekNFA/FleekNFA';
|
||||
import {
|
||||
handleApproval,
|
||||
handleApprovalForAll,
|
||||
handleChangeAccessPointCreationStatus,
|
||||
handleChangeAccessPointNameVerify,
|
||||
handleNewAccessPoint,
|
||||
handleNewMint,
|
||||
handleTransfer,
|
||||
handleTokenRoleChanged,
|
||||
handleCollectionRoleChanged,
|
||||
} from '../../../src/fleek-nfa';
|
||||
|
||||
export function createApprovalEvent(
|
||||
event_count: i32,
|
||||
owner: Address,
|
||||
approved: Address,
|
||||
tokenId: BigInt
|
||||
): ApprovalEvent {
|
||||
let approvalEvent = changetype<ApprovalEvent>(newMockEvent());
|
||||
|
||||
approvalEvent.parameters = new Array();
|
||||
|
||||
approvalEvent.parameters.push(
|
||||
new ethereum.EventParam('owner', ethereum.Value.fromAddress(owner))
|
||||
);
|
||||
approvalEvent.parameters.push(
|
||||
new ethereum.EventParam('approved', ethereum.Value.fromAddress(approved))
|
||||
);
|
||||
approvalEvent.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
|
||||
approvalEvent.transaction.hash = Bytes.fromI32(event_count);
|
||||
approvalEvent.logIndex = new BigInt(event_count);
|
||||
|
||||
return approvalEvent;
|
||||
}
|
||||
|
||||
export function createApprovalForAllEvent(
|
||||
event_count: i32,
|
||||
owner: Address,
|
||||
operator: Address,
|
||||
approved: boolean
|
||||
): ApprovalForAllEvent {
|
||||
let approvalForAllEvent = changetype<ApprovalForAllEvent>(newMockEvent());
|
||||
|
||||
approvalForAllEvent.parameters = new Array();
|
||||
|
||||
approvalForAllEvent.parameters.push(
|
||||
new ethereum.EventParam('owner', ethereum.Value.fromAddress(owner))
|
||||
);
|
||||
approvalForAllEvent.parameters.push(
|
||||
new ethereum.EventParam('operator', ethereum.Value.fromAddress(operator))
|
||||
);
|
||||
approvalForAllEvent.parameters.push(
|
||||
new ethereum.EventParam('approved', ethereum.Value.fromBoolean(approved))
|
||||
);
|
||||
|
||||
approvalForAllEvent.transaction.hash = Bytes.fromI32(event_count);
|
||||
approvalForAllEvent.logIndex = new BigInt(event_count);
|
||||
|
||||
return approvalForAllEvent;
|
||||
}
|
||||
|
||||
export function createTransferEvent(
|
||||
event_count: i32,
|
||||
from: Address,
|
||||
to: Address,
|
||||
tokenId: BigInt
|
||||
): TransferEvent {
|
||||
let transferEvent = changetype<TransferEvent>(newMockEvent());
|
||||
|
||||
transferEvent.parameters = new Array();
|
||||
|
||||
transferEvent.parameters.push(
|
||||
new ethereum.EventParam('from', ethereum.Value.fromAddress(from))
|
||||
);
|
||||
transferEvent.parameters.push(
|
||||
new ethereum.EventParam('to', ethereum.Value.fromAddress(to))
|
||||
);
|
||||
transferEvent.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
|
||||
transferEvent.transaction.hash = Bytes.fromI32(event_count);
|
||||
transferEvent.logIndex = new BigInt(event_count);
|
||||
|
||||
return transferEvent;
|
||||
}
|
||||
|
||||
export function createNewMintEvent(
|
||||
event_count: i32,
|
||||
to: Address,
|
||||
tokenId: BigInt
|
||||
): NewMintEvent {
|
||||
let newMintEvent = changetype<NewMintEvent>(newMockEvent());
|
||||
|
||||
newMintEvent.parameters = new Array();
|
||||
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('name', ethereum.Value.fromString('name'))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'description',
|
||||
ethereum.Value.fromString('description')
|
||||
)
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'externalURL',
|
||||
ethereum.Value.fromString('externalurl')
|
||||
)
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('ENS', ethereum.Value.fromString('ens'))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('commitHash', ethereum.Value.fromString('hash'))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('gitRepository', ethereum.Value.fromString('repo'))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('logo', ethereum.Value.fromString('logo'))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('color', ethereum.Value.fromI32(1234))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'accessPointAutoApproval',
|
||||
ethereum.Value.fromBoolean(true)
|
||||
)
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('minter', ethereum.Value.fromAddress(to))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('owner', ethereum.Value.fromAddress(to))
|
||||
);
|
||||
newMintEvent.parameters.push(
|
||||
new ethereum.EventParam('verifier', ethereum.Value.fromAddress(to))
|
||||
);
|
||||
|
||||
newMintEvent.transaction.hash = Bytes.fromI32(event_count);
|
||||
newMintEvent.logIndex = new BigInt(event_count);
|
||||
|
||||
return newMintEvent;
|
||||
}
|
||||
|
||||
export function createNewAccessPointEvent(
|
||||
event_count: i32,
|
||||
apName: string,
|
||||
tokenId: BigInt,
|
||||
owner: Address
|
||||
): NewAccessPoint {
|
||||
let newAccessPoint = changetype<NewAccessPoint>(newMockEvent());
|
||||
|
||||
newAccessPoint.parameters = new Array();
|
||||
|
||||
newAccessPoint.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'apName',
|
||||
ethereum.Value.fromString(apName.toString())
|
||||
)
|
||||
);
|
||||
|
||||
newAccessPoint.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
|
||||
newAccessPoint.parameters.push(
|
||||
new ethereum.EventParam('owner', ethereum.Value.fromAddress(owner))
|
||||
);
|
||||
|
||||
newAccessPoint.transaction.hash = Bytes.fromI32(event_count);
|
||||
newAccessPoint.logIndex = new BigInt(event_count);
|
||||
|
||||
return newAccessPoint;
|
||||
}
|
||||
|
||||
export function createNewChangeAccessPointCreationStatus(
|
||||
event_count: i32,
|
||||
apName: string,
|
||||
tokenId: BigInt,
|
||||
status: i32,
|
||||
triggeredBy: Address
|
||||
): ChangeAccessPointCreationStatus {
|
||||
let changeAccessPointCreationStatus =
|
||||
changetype<ChangeAccessPointCreationStatus>(newMockEvent());
|
||||
|
||||
changeAccessPointCreationStatus.parameters = new Array();
|
||||
|
||||
changeAccessPointCreationStatus.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'apName',
|
||||
ethereum.Value.fromString(apName.toString())
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointCreationStatus.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointCreationStatus.parameters.push(
|
||||
new ethereum.EventParam('creationStatus', ethereum.Value.fromI32(status))
|
||||
);
|
||||
|
||||
changeAccessPointCreationStatus.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'triggeredBy',
|
||||
ethereum.Value.fromAddress(triggeredBy)
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointCreationStatus.transaction.hash = Bytes.fromI32(event_count);
|
||||
changeAccessPointCreationStatus.logIndex = new BigInt(event_count);
|
||||
|
||||
return changeAccessPointCreationStatus;
|
||||
}
|
||||
|
||||
export function createNewChangeAccessPointNameVerify(
|
||||
event_count: i32,
|
||||
apName: string,
|
||||
tokenId: BigInt,
|
||||
verified: boolean,
|
||||
triggeredBy: Address
|
||||
): ChangeAccessPointNameVerify {
|
||||
let changeAccessPointNameVerify = changetype<ChangeAccessPointNameVerify>(
|
||||
newMockEvent()
|
||||
);
|
||||
|
||||
changeAccessPointNameVerify.parameters = new Array();
|
||||
|
||||
changeAccessPointNameVerify.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'apName',
|
||||
ethereum.Value.fromString(apName.toString())
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointNameVerify.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointNameVerify.parameters.push(
|
||||
new ethereum.EventParam('verified', ethereum.Value.fromBoolean(verified))
|
||||
);
|
||||
|
||||
changeAccessPointNameVerify.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'triggeredBy',
|
||||
ethereum.Value.fromAddress(triggeredBy)
|
||||
)
|
||||
);
|
||||
|
||||
changeAccessPointNameVerify.transaction.hash = Bytes.fromI32(event_count);
|
||||
changeAccessPointNameVerify.logIndex = new BigInt(event_count);
|
||||
|
||||
return changeAccessPointNameVerify;
|
||||
}
|
||||
|
||||
export function createNewTokenRoleChanged(
|
||||
event_count: i32,
|
||||
tokenId: BigInt,
|
||||
role: i32,
|
||||
toAddress: Address,
|
||||
status: boolean,
|
||||
byAddress: Address
|
||||
): TokenRoleChanged {
|
||||
let tokenRoleChanged = changetype<TokenRoleChanged>(newMockEvent());
|
||||
|
||||
tokenRoleChanged.parameters = new Array();
|
||||
|
||||
tokenRoleChanged.parameters.push(
|
||||
new ethereum.EventParam(
|
||||
'tokenId',
|
||||
ethereum.Value.fromUnsignedBigInt(tokenId)
|
||||
)
|
||||
);
|
||||
|
||||
tokenRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('role', ethereum.Value.fromI32(role))
|
||||
);
|
||||
|
||||
tokenRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('toAddress', ethereum.Value.fromAddress(toAddress))
|
||||
);
|
||||
|
||||
tokenRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('status', ethereum.Value.fromBoolean(status))
|
||||
);
|
||||
|
||||
tokenRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('byAddress', ethereum.Value.fromAddress(byAddress))
|
||||
);
|
||||
|
||||
tokenRoleChanged.transaction.hash = Bytes.fromI32(event_count);
|
||||
tokenRoleChanged.logIndex = new BigInt(event_count);
|
||||
|
||||
return tokenRoleChanged;
|
||||
}
|
||||
|
||||
export function createNewCollectionRoleChanged(
|
||||
event_count: i32,
|
||||
role: i32,
|
||||
toAddress: Address,
|
||||
status: boolean,
|
||||
byAddress: Address
|
||||
): CollectionRoleChanged {
|
||||
let collectionRoleChanged = changetype<CollectionRoleChanged>(newMockEvent());
|
||||
|
||||
collectionRoleChanged.parameters = new Array();
|
||||
|
||||
collectionRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('role', ethereum.Value.fromI32(role))
|
||||
);
|
||||
|
||||
collectionRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('toAddress', ethereum.Value.fromAddress(toAddress))
|
||||
);
|
||||
|
||||
collectionRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('status', ethereum.Value.fromBoolean(status))
|
||||
);
|
||||
|
||||
collectionRoleChanged.parameters.push(
|
||||
new ethereum.EventParam('byAddress', ethereum.Value.fromAddress(byAddress))
|
||||
);
|
||||
|
||||
collectionRoleChanged.transaction.hash = Bytes.fromI32(event_count);
|
||||
collectionRoleChanged.logIndex = new BigInt(event_count);
|
||||
|
||||
return collectionRoleChanged;
|
||||
}
|
||||
|
||||
export const CONTRACT: Address = Address.fromString(
|
||||
'0x0000000000000000000000000000000000000000'
|
||||
);
|
||||
export const CONTRACT_OWNER: Address = Address.fromString(
|
||||
'0x1000000000000000000000000000000000000001'
|
||||
);
|
||||
export const USER_ONE: Address = Address.fromString(
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
export const USER_TWO: Address = Address.fromString(
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
|
||||
export function handleTransfers(events: TransferEvent[]): void {
|
||||
events.forEach((event) => {
|
||||
handleTransfer(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleApprovals(events: ApprovalEvent[]): void {
|
||||
events.forEach((event) => {
|
||||
handleApproval(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleNewMints(events: NewMintEvent[]): void {
|
||||
events.forEach((event) => {
|
||||
handleNewMint(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleApprovalForAlls(events: ApprovalForAllEvent[]): void {
|
||||
events.forEach((event) => {
|
||||
handleApprovalForAll(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleNewAccessPoints(events: NewAccessPoint[]): void {
|
||||
events.forEach((event) => {
|
||||
handleNewAccessPoint(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleChangeAccessPointCreationStatusList(
|
||||
events: ChangeAccessPointCreationStatus[]
|
||||
): void {
|
||||
events.forEach((event) => {
|
||||
handleChangeAccessPointCreationStatus(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleChangeAccessPointNameVerifies(
|
||||
events: ChangeAccessPointNameVerify[]
|
||||
): void {
|
||||
events.forEach((event) => {
|
||||
handleChangeAccessPointNameVerify(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleTokenRoleChangedList(events: TokenRoleChanged[]): void {
|
||||
events.forEach((event) => {
|
||||
handleTokenRoleChanged(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleCollectionRoleChangedList(
|
||||
events: CollectionRoleChanged[]
|
||||
): void {
|
||||
events.forEach((event) => {
|
||||
handleCollectionRoleChanged(event);
|
||||
});
|
||||
}
|
||||
|
||||
export function makeEventId(id: i32): string {
|
||||
return Bytes.fromI32(id).toHexString() + '00000000';
|
||||
}
|
||||
|
|
@ -1,134 +0,0 @@
|
|||
import {
|
||||
assert,
|
||||
describe,
|
||||
test,
|
||||
clearStore,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
logStore,
|
||||
log,
|
||||
} from 'matchstick-as/assembly/index';
|
||||
import { BigInt } from '@graphprotocol/graph-ts';
|
||||
import {
|
||||
CONTRACT,
|
||||
createNewMintEvent,
|
||||
createTransferEvent,
|
||||
handleNewMints,
|
||||
handleTransfers,
|
||||
makeEventId,
|
||||
USER_ONE,
|
||||
USER_TWO,
|
||||
} from './helpers/utils';
|
||||
import { NewMint, Transfer } from '../../generated/FleekNFA/FleekNFA';
|
||||
|
||||
describe('Owner tests', () => {
|
||||
beforeAll(() => {
|
||||
// NEW MINTS
|
||||
let newMints: NewMint[] = [];
|
||||
newMints.push(createNewMintEvent(0, USER_ONE, BigInt.fromI32(0)));
|
||||
newMints.push(createNewMintEvent(1, USER_TWO, BigInt.fromI32(1)));
|
||||
newMints.push(createNewMintEvent(2, USER_ONE, BigInt.fromI32(2)));
|
||||
newMints.push(createNewMintEvent(3, USER_ONE, BigInt.fromI32(3)));
|
||||
newMints.push(createNewMintEvent(4, USER_TWO, BigInt.fromI32(4)));
|
||||
handleNewMints(newMints);
|
||||
// TRANSFERS
|
||||
let transfers: Transfer[] = [];
|
||||
transfers.push(
|
||||
createTransferEvent(0, CONTRACT, USER_ONE, BigInt.fromI32(0))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(1, CONTRACT, USER_TWO, BigInt.fromI32(1))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(2, CONTRACT, USER_ONE, BigInt.fromI32(2))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(3, CONTRACT, USER_ONE, BigInt.fromI32(3))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(4, USER_TWO, USER_ONE, BigInt.fromI32(1))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(5, CONTRACT, USER_TWO, BigInt.fromI32(4))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(6, USER_ONE, USER_TWO, BigInt.fromI32(0))
|
||||
);
|
||||
handleTransfers(transfers);
|
||||
//logStore();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
clearStore();
|
||||
});
|
||||
|
||||
describe('Transfers', () => {
|
||||
test('Check the number of transfers to be valid', () => {
|
||||
assert.entityCount('Transfer', 7);
|
||||
});
|
||||
test('Check the `from` and `to` fields of each transfer to be equal to expected values', () => {
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(0),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(1),
|
||||
'to',
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(2),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(3),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(4),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(5),
|
||||
'to',
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(6),
|
||||
'to',
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Owner Assertions', () => {
|
||||
test('Check the number of owners to be valid', () => {
|
||||
assert.entityCount('Owner', 2);
|
||||
});
|
||||
test('Check the existence of owners in store', () => {
|
||||
assert.fieldEquals(
|
||||
'Owner',
|
||||
USER_ONE.toHexString(),
|
||||
'id',
|
||||
USER_ONE.toHexString()
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Owner',
|
||||
USER_TWO.toHexString(),
|
||||
'id',
|
||||
USER_TWO.toHexString()
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,104 +0,0 @@
|
|||
import {
|
||||
assert,
|
||||
describe,
|
||||
test,
|
||||
clearStore,
|
||||
beforeAll,
|
||||
afterAll,
|
||||
logStore,
|
||||
log,
|
||||
} from 'matchstick-as/assembly/index';
|
||||
import { BigInt, Bytes } from '@graphprotocol/graph-ts';
|
||||
import {
|
||||
CONTRACT,
|
||||
createTransferEvent,
|
||||
handleTransfers,
|
||||
makeEventId,
|
||||
USER_ONE,
|
||||
USER_TWO,
|
||||
} from './helpers/utils';
|
||||
import { Transfer } from '../../generated/FleekNFA/FleekNFA';
|
||||
|
||||
describe('Transfer tests', () => {
|
||||
beforeAll(() => {
|
||||
// TRANSFERS
|
||||
let transfers: Transfer[] = [];
|
||||
transfers.push(
|
||||
createTransferEvent(0, CONTRACT, USER_ONE, BigInt.fromI32(0))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(1, CONTRACT, USER_TWO, BigInt.fromI32(1))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(2, CONTRACT, USER_ONE, BigInt.fromI32(2))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(3, CONTRACT, USER_ONE, BigInt.fromI32(3))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(4, USER_TWO, USER_ONE, BigInt.fromI32(1))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(5, CONTRACT, USER_TWO, BigInt.fromI32(4))
|
||||
);
|
||||
transfers.push(
|
||||
createTransferEvent(6, USER_ONE, USER_TWO, BigInt.fromI32(0))
|
||||
);
|
||||
handleTransfers(transfers);
|
||||
// logStore();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
clearStore();
|
||||
});
|
||||
|
||||
describe('Transfers', () => {
|
||||
test('Check the number of transfers to be valid', () => {
|
||||
assert.entityCount('Transfer', 7);
|
||||
});
|
||||
test('Check the `from` and `to` fields of each transfer to be equal to expected values', () => {
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(0),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(1),
|
||||
'to',
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(2),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(3),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(4),
|
||||
'to',
|
||||
'0x2000000000000000000000000000000000000002'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(5),
|
||||
'to',
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
assert.fieldEquals(
|
||||
'Transfer',
|
||||
makeEventId(6),
|
||||
'to',
|
||||
'0x3000000000000000000000000000000000000003'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@@ -3,7 +3,18 @@ sources:
   - name: FleekNFA
     handler:
       graphql:
-        endpoint: https://api.thegraph.com/subgraphs/name/emperororokusaki/flk-test-subgraph #replace for nfa subgraph
+        endpoint: https://api.thegraph.com/subgraphs/name/emperororokusaki/second-test-subgraph
+
+  - name: ENS
+    handler:
+      graphql:
+        endpoint: https://api.thegraph.com/subgraphs/name/ensdomains/ens
+    transforms:
+      - rename:
+          - from:
+              type: Transfer
+            to:
+              type: ENSTransfer
 
 documents:
   - ./graphql/*.graphql

@@ -1,16 +0,0 @@
-module.exports = {
-  stories: ['../src/**/*.stories.@(js|jsx|ts|tsx)'],
-  addons: [
-    '@storybook/addon-links',
-    '@storybook/addon-essentials',
-    '@storybook/addon-interactions',
-    'storybook-dark-mode',
-  ],
-  framework: '@storybook/react',
-  core: {
-    builder: '@storybook/builder-vite',
-  },
-  features: {
-    storyStoreV7: true,
-  },
-};

@@ -1,7 +0,0 @@
-<link
-  href="https://fonts.googleapis.com/css2?family=Manrope:wght@400;500;700&display=swap"
-  rel="stylesheet"
-/>
-<script>
-  window.global = window;
-</script>

@@ -1,52 +0,0 @@
-import { dripStitches } from '../src/theme';
-import addons from '@storybook/addons';
-import { useEffect } from 'react';
-import { themes } from '@storybook/theming';
-
-const channel = addons.getChannel();
-
-export const parameters = {
-  actions: { argTypesRegex: '^on[A-Z].*' },
-  controls: {
-    matchers: {
-      color: /(background|color)$/i,
-      date: /Date$/,
-    },
-  },
-  darkMode: {
-    dark: { ...themes.dark, backgroundColor: 'black' },
-    // light: { ...themes.normal, backgroundColor: 'white' },
-  },
-};
-
-const { darkTheme: darkThemeClassName } = dripStitches;
-
-export const decorators = [
-  (renderStory) => {
-    const { globalCss } = dripStitches;
-
-    const globalStyles = globalCss({
-      '*, html': {
-        'font-family': 'Manrope',
-      },
-      body: {
-        backgroundColor: 'black',
-      },
-    });
-
-    globalStyles();
-
-    useEffect(() => {
-      function switchColorMode(isDarkMode) {
-        document.body.style.backgroundColor = isDarkMode ? 'black' : 'white';
-        document.body.classList.remove('light', darkThemeClassName);
-        document.body.classList.add(isDarkMode ? darkThemeClassName : 'light');
-      }
-
-      channel.on('DARK_MODE', switchColorMode);
-      return () => channel.off('DARK_MODE', switchColorMode);
-    }, []);
-
-    return renderStory();
-  },
-];

ui/README.md (14 lines changed)
@@ -12,6 +12,12 @@ You'll need to have [nodejs](https://nodejs.org/en/) and [YARN](https://classic.
 Also, don't forget to check the [Getting started section](https://github.com/fleekxyz/non-fungible-apps/wiki/%F0%9F%93%98-Getting-Started) on the wiki if you didn't do it yet, because you need to configure your wallet to be able to mint a site.
 
+### Setting Contract Address and ABI
+
+The contract address and ABI are set by pointing `ui/src/integrations/ethereum/contracts/FleekERC721.json` to the file from the deployment outputs in the contracts sub-project.
+
+This can be a local deployment or a deployment on one of the networks. This keeps the info in the UI configuration consistent with the deployed contracts.
+
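As a rough sketch of what consuming that file in the UI could look like (this assumes the standard Hardhat-style deployment output with `address` and `abi` fields; the import path and export name below are illustrative, not the project's actual module layout):

```ts
// Hypothetical sketch only, not the repository's actual wiring.
// Assumes FleekERC721.json has the Hardhat deployment shape: { address, abi, ... }.
import FleekERC721 from './integrations/ethereum/contracts/FleekERC721.json';

export const fleekERC721 = {
  address: FleekERC721.address,
  abi: FleekERC721.abi,
};
```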
 ### 🖥️ Running
 
 To run the UI locally, follow these steps:
 
@@ -51,7 +57,13 @@ To run the UI locally, follow these steps:
 Get them from the project settings on the Firebase dashboard. Read [this article](https://support.google.com/firebase/answer/7015592?hl=en#zippy=%2Cin-this-article) to learn how to get your project config.
 
-4. Start the local server running the app:
+4. To interact with the contract, you need to set the Goerli RPC URL. Set this variable in the `.env` file:
 
+```bash
+VITE_GOERLI_RPC
+```
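For reference, Vite only exposes `VITE_`-prefixed variables to client code, where they are read from `import.meta.env`; a minimal sketch of consuming the value (the guard and error message are illustrative):

```ts
// Minimal sketch: reading the RPC URL configured in ui/.env.
// Vite inlines VITE_-prefixed variables into import.meta.env at build time.
const goerliRpcUrl = import.meta.env.VITE_GOERLI_RPC as string;

if (!goerliRpcUrl) {
  throw new Error('VITE_GOERLI_RPC is not set; add it to the ui/.env file');
}
```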
+
+5. Start the local server running the app:
 
 ```bash
 $ yarn dev

@@ -0,0 +1,5 @@
+{
+  "id": "clg2kh5db0000mj0851w4y9xi",
+  "name": "nfa-dev-deploy",
+  "distDir": "dist"
+}

@@ -1,14 +1,27 @@
-query lastMintsPaginated($pageSize: Int, $skip: Int) {
-  newMints(
+query lastNFAsPaginated(
+  $pageSize: Int
+  $skip: Int
+  $orderBy: Token_orderBy
+  $orderDirection: OrderDirection
+  $searchValue: String
+) {
+  tokens(
     first: $pageSize
     skip: $skip
-    orderDirection: desc
-    orderBy: tokenId
+    orderDirection: $orderDirection
+    orderBy: $orderBy
+    where: { name_contains_nocase: $searchValue }
   ) {
     id
     tokenId
     description
     name
     ENS
     color
     logo
+    accessPoints {
+      id
+    }
   }
 }
 
@@ -18,3 +31,32 @@ query totalTokens {
   }
 }
 
+query getLatestNFAs {
+  tokens {
+    id
+    name
+  }
+}
+
+query getNFA($id: ID!) {
+  token(id: $id) {
+    tokenId
+    name
+  }
+}
+
+query getVerifiers {
+  verifiers {
+    id
+  }
+}
+
+# query to get the ens name of an address
+query getENSNames($address: ID!) {
+  account(id: $address) {
+    domains {
+      name
+    }
+  }
+}
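As a sketch of how one of these queries can be exercised without the generated `.graphclient` SDK, a plain GraphQL-over-HTTP request against the subgraph endpoint configured in the graph client config earlier in this diff works as well (the helper name and return typing are illustrative):

```ts
// Sketch only: querying the subgraph with plain fetch instead of the generated client.
// The endpoint matches the FleekNFA source configured in the graph client YAML above.
const SUBGRAPH_URL =
  'https://api.thegraph.com/subgraphs/name/emperororokusaki/second-test-subgraph';

export async function fetchNFA(id: string): Promise<unknown> {
  const response = await fetch(SUBGRAPH_URL, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      query: 'query getNFA($id: ID!) { token(id: $id) { tokenId name } }',
      variables: { id },
    }),
  });

  const { data } = await response.json();
  return data.token;
}
```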
@@ -3,15 +3,14 @@
   "version": "0.0.1",
   "description": "Minimal UI for Fleek Non Fungible Apps",
   "main": "index.js",
   "license": "MIT",
   "scripts": {
     "dev": "vite",
     "dev:css": "tailwindcss -o ./tailwind.css --watch && yarn dev",
     "build": "yarn graphclient build && vite build",
+    "postinstall": "graphclient build",
     "preview": "vite preview",
-    "prod": "yarn build && npx serve dist -s",
-    "storybook": "export SET NODE_OPTIONS=--openssl-legacy-provider && start-storybook -p 6006",
-    "build-storybook": "build-storybook"
+    "prod": "yarn build && npx serve dist -s"
   },
   "author": "Fleek",
   "dependencies": {

@@ -43,15 +42,7 @@
   "devDependencies": {
     "@babel/core": "^7.20.12",
     "@graphprotocol/client-cli": "^2.2.19",
-    "@storybook/addon-actions": "^6.5.15",
-    "@storybook/addon-essentials": "^6.5.15",
-    "@storybook/addon-interactions": "^6.5.15",
-    "@storybook/addon-links": "^6.5.15",
-    "@storybook/addons": "^6.5.15",
-    "@storybook/builder-vite": "^0.2.7",
-    "@storybook/react": "^6.5.15",
-    "@storybook/testing-library": "^0.0.13",
-    "@storybook/theming": "^6.5.15",
+    "@graphql-mesh/transform-rename": "^0.14.22",
     "@types/jest": "^29.2.3",
     "@types/node": "^18.11.9",
     "@types/react": "^18.0.25",

@@ -71,7 +62,6 @@
     "prettier": "^2.8.0",
     "process": "^0.11.10",
     "react-query": "^3.39.2",
-    "storybook-dark-mode": "^2.0.5",
     "tailwindcss": "^3.2.4",
     "ts-loader": "^9.4.1",
     "typescript": "^4.9.3",

@@ -1,3 +1,4 @@
+// eslint-disable-next-line no-undef
 module.exports = {
   plugins: {
     tailwindcss: {},

@@ -1,30 +1,27 @@
-import { HashRouter, Route, Routes, Navigate } from 'react-router-dom';
-import { themeGlobals } from '@/theme/globals';
-import { ComponentsTest, Home, Mint } from './views';
-import { ConnectKitButton } from 'connectkit';
-import { MintTest } from './views/mint-test';
-import { ToastProvider } from './components';
-import { CreateAP } from './views/access-point';
+import { HashRouter, Navigate, Route, Routes } from 'react-router-dom';
+
+import { themeGlobals } from '@/theme/globals';
+
+import { AppPage, ToastProvider } from './components';
+import { ComponentsTest, CreateAP, Explore, Home, Mint } from './views';
 
-export const App = () => {
+export const App: React.FC = () => {
   themeGlobals();
   return (
     <>
-      <div style={{ position: 'absolute', top: '1.25rem', left: '1.25rem' }}>
-        {/* TODO remove after adding NavBar */}
-        <ConnectKitButton />
-      </div>
       <ToastProvider />
       <HashRouter>
-        <Routes>
-          <Route path="/home" element={<Home />} />
-          <Route path="/mint" element={<Mint />} />
-          <Route path="/create-ap/:id" element={<CreateAP />} />
-          {/** TODO remove for release */}
-          <Route path="/components-test" element={<ComponentsTest />} />
-          <Route path="/mint-test" element={<MintTest />} />
-          <Route path="*" element={<Navigate to="/home" />} />
-        </Routes>
+        <AppPage>
+          <Routes>
+            <Route path="/" element={<Explore />} />
+            <Route path="/mint" element={<Mint />} />
+            <Route path="/create-ap" element={<CreateAP />} />
+            <Route path="/create-ap/:id" element={<CreateAP />} />
+            {/** TODO remove for release */}
+            <Route path="/components-test" element={<ComponentsTest />} />
+            <Route path="*" element={<Navigate to="/" />} />
+          </Routes>
+        </AppPage>
       </HashRouter>
     </>
   );

@@ -1,6 +1,4 @@
-import { dripStitches } from '@/theme';
-
-const { styled } = dripStitches;
+import { styled } from '@/theme';
 
 export abstract class CardStyles {
   static readonly Container = styled('div', {

@@ -1,5 +1,6 @@
-import { Octokit } from 'octokit';
+/* eslint-disable react/display-name */
 import React, { forwardRef } from 'react';
 
 import { Flex } from '../layout';
 import { CardStyles } from './card.styles';

@@ -15,9 +16,9 @@ export abstract class Card {
   );
 
   static readonly Heading = forwardRef<HTMLHeadingElement, Card.HeadingProps>(
-    ({ title, leftIcon, rightIcon, ...props }, ref) => {
+    ({ title, leftIcon, rightIcon, css, ...props }, ref) => {
       return (
-        <Flex css={{ justifyContent: 'space-between' }}>
+        <Flex css={{ justifyContent: 'space-between', ...css }}>
           <Flex>
             {leftIcon}
             <CardStyles.Heading ref={ref} {...props}>

@@ -58,6 +59,7 @@ export namespace Card {
 
   export type HeadingProps = {
     title: string;
+    css?: React.CSSProperties;
     leftIcon?: React.ReactNode;
     rightIcon?: React.ReactNode;
   } & React.ComponentProps<typeof CardStyles.Heading>;

Some files were not shown because too many files have changed in this diff.