Merge pull request #283 from fleekxyz/release/v0.1.0

RELEASE: v0.1.0
This commit is contained in:
Shredder 2023-07-06 20:14:36 +03:30 committed by GitHub
commit cd27920cbd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
84 changed files with 13196 additions and 4480 deletions

View File

@ -1,20 +1,22 @@
# Fleek Non-Fungible Apps
**The repository for Fleek Non-Fungible Apps project**
# Non-Fungible Apps - by Fleek
> 🚧 IMPORTANT 🚧 - This initiative is under development, so this repo should be treated as a WIP. The goals and the roadmap might change as the project is shaped.
## ⚡ Overview
This is the landing point for Fleek's initiative to implement infrastructure as Solidity contracts.
This is the landing point for our initiative to decentralize web3 app frontend infrastructure, their distribution, and access, through Non-fungible Applications, an NFT-based implementation.
The vision is to have this on Ethereum Mainnet. We can create a network of smart contracts that represent the different parts of your stack. We had previously targeted Polygon but have changed it to Ethereum Mainnet for better interoperability and the security that mainnet provides.
**The goal of NFAs** is to put web3 application frontends, and their surrounding infra, on-chain to provide users with a verifiable and crypto-friendly way of accessing applications.
The goal is to be a more verifiable and crypto-friendly Serverless.yaml or Cloudformation manifests that will enable us to develop use cases on top like community hosting.
1. Developers mint their app's frontend onto an NFT, where the frontend bundle is stored and referenced from IPFS.
2. Users mint prints of the NFA, which allows them to have a decentralized and permanently on-chain access point to any web3 app.
3. Users can load said apps without relying on DNS, for example, locally via the browser or through a potentially integrated wallet.
We've developed a base set of contract code so now we want to harden it as well as add more metadata and features to support use cases with the first use case being community hosting. So be sure to check out the roadmap on the wiki.
We want to have a chain- and wallet-agnostic standard. While we use ERC-721 as a base reference, the base idea is to be kept simple, with extensions/modules separate, so that NFAs can be easily replicated on any comparable chain, language, or NFT standard.
We've developed a base set of contract code so now we want to harden it as well as add more metadata and features to support the additions of new modules - So be sure to open issues or PRs, as we welcome all contributions and collaborations!
You can find the wiki [here](https://github.com/fleekxyz/non-fungible-apps/wiki) for more information about the project.
## ⌨️ Developing
@ -54,20 +56,16 @@ $ yarn format
> ⚠️ Please make sure you are following the code styling guide before pushing the code
## 🛣️ Roadmap
## 🛣️ Development Roadmap
Our goal is to reach a point where trustworthy Solidity contracts can be used to properly identify the data about web3 applications. Within that goal, we also want to provide ways for users to organize and list information about their applications. To get there, we are currently starting with:
- Define trustable and extendable smart contracts and standards
- Prove how the concept would be applied using static sites
- Prove community-hosted apps via these contracts
- Prove the minting and copy-mint flow for these contracts
Later on, when the initiative proves its value, a service will be added to Fleek's platform in a friendly way for anyone to be able to get their applications onboard.
## 💡 Proof of concept
The proof of concept was concluded last year and you can reach more information [here](https://github.com/fleekxyz/non-fungible-apps/wiki/%F0%9F%92%A1-Proof-of-Concept).
## 📚 Dependency Highlights
We use the following libraries to develop Fleek Non-Fungible Apps
@ -91,4 +89,4 @@ Fleek Non-Fungible Apps is released under the [MIT License](LICENSE).
## 🐛 Bug reporting
If you have found a bug to report, please create an [issue](https://github.com/fleekxyz/non-fungible-apps/issues). Thank you!
If you have found a bug to report, please create an [issue](https://github.com/fleekxyz/non-fungible-apps/issues). Thank you!

View File

@ -2,6 +2,7 @@
cache
artifacts
deployments/hardhat
deployments/local
gas-report
# Foundry

View File

@ -9,6 +9,11 @@
"address": "0x8795608346Eb475E42e69F1281008AEAa522479D",
"txHash": "0x626662cdb0902646dd70d3ef50abb00c12614d8e572b175f2e45a40a73d4954e",
"kind": "transparent"
},
{
"address": "0xce89f3853a8bE0b274b2aD6fa87F3f2b575164cE",
"txHash": "0xc47529f41c4694ad1481ecbc0d5e15ae4348bf1c61cee7b4aa37472458f3eede",
"kind": "transparent"
}
],
"impls": {
@ -486,6 +491,501 @@
}
}
}
},
"9905a387868ffc9dbf779f1e1577ece30843b18653e189326fced2b96114ff68": {
"address": "0xcAee0AB33a3aB6CE9b4fdc6A909f432d48834046",
"txHash": "0x465f1543475cc6ed8839f0b48f7196a519273e033ff81313455076d4dfa10163",
"layout": {
"solcVersion": "0.8.12",
"storage": [
{
"label": "_initialized",
"offset": 0,
"slot": "0",
"type": "t_uint8",
"contract": "Initializable",
"src": "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol:62",
"retypedFrom": "bool"
},
{
"label": "_initializing",
"offset": 1,
"slot": "0",
"type": "t_bool",
"contract": "Initializable",
"src": "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol:67"
},
{
"label": "__gap",
"offset": 0,
"slot": "1",
"type": "t_array(t_uint256)50_storage",
"contract": "ContextUpgradeable",
"src": "@openzeppelin/contracts-upgradeable/utils/ContextUpgradeable.sol:36"
},
{
"label": "__gap",
"offset": 0,
"slot": "51",
"type": "t_array(t_uint256)50_storage",
"contract": "ERC165Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol:41"
},
{
"label": "_name",
"offset": 0,
"slot": "101",
"type": "t_string_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:25"
},
{
"label": "_symbol",
"offset": 0,
"slot": "102",
"type": "t_string_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:28"
},
{
"label": "_owners",
"offset": 0,
"slot": "103",
"type": "t_mapping(t_uint256,t_address)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:31"
},
{
"label": "_balances",
"offset": 0,
"slot": "104",
"type": "t_mapping(t_address,t_uint256)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:34"
},
{
"label": "_tokenApprovals",
"offset": 0,
"slot": "105",
"type": "t_mapping(t_uint256,t_address)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:37"
},
{
"label": "_operatorApprovals",
"offset": 0,
"slot": "106",
"type": "t_mapping(t_address,t_mapping(t_address,t_bool))",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:40"
},
{
"label": "__gap",
"offset": 0,
"slot": "107",
"type": "t_array(t_uint256)44_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:514"
},
{
"label": "_collectionRolesCounter",
"offset": 0,
"slot": "151",
"type": "t_mapping(t_enum(CollectionRoles)4485,t_uint256)",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:58"
},
{
"label": "_collectionRoles",
"offset": 0,
"slot": "152",
"type": "t_mapping(t_enum(CollectionRoles)4485,t_mapping(t_address,t_bool))",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:63"
},
{
"label": "_tokenRolesVersion",
"offset": 0,
"slot": "153",
"type": "t_mapping(t_uint256,t_uint256)",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:70"
},
{
"label": "_tokenRoles",
"offset": 0,
"slot": "154",
"type": "t_mapping(t_uint256,t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool))))",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:75"
},
{
"label": "__gap",
"offset": 0,
"slot": "155",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:178"
},
{
"label": "_paused",
"offset": 0,
"slot": "204",
"type": "t_bool",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:23"
},
{
"label": "_canPause",
"offset": 1,
"slot": "204",
"type": "t_bool",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:24"
},
{
"label": "__gap",
"offset": 0,
"slot": "205",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:133"
},
{
"label": "_billings",
"offset": 0,
"slot": "254",
"type": "t_mapping(t_enum(Billing)5630,t_uint256)",
"contract": "FleekBilling",
"src": "contracts/FleekBilling.sol:31"
},
{
"label": "__gap",
"offset": 0,
"slot": "255",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekBilling",
"src": "contracts/FleekBilling.sol:81"
},
{
"label": "_accessPoints",
"offset": 0,
"slot": "304",
"type": "t_mapping(t_string_memory_ptr,t_struct(AccessPoint)4984_storage)",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:64"
},
{
"label": "_autoApproval",
"offset": 0,
"slot": "305",
"type": "t_mapping(t_uint256,t_bool)",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:66"
},
{
"label": "__gap",
"offset": 0,
"slot": "306",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:211"
},
{
"label": "_appIds",
"offset": 0,
"slot": "355",
"type": "t_uint256",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:53"
},
{
"label": "_apps",
"offset": 0,
"slot": "356",
"type": "t_mapping(t_uint256,t_struct(Token)7426_storage)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:54"
},
{
"label": "_tokenVerifier",
"offset": 0,
"slot": "357",
"type": "t_mapping(t_uint256,t_address)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:55"
},
{
"label": "_tokenVerified",
"offset": 0,
"slot": "358",
"type": "t_mapping(t_uint256,t_bool)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:56"
}
],
"types": {
"t_address": {
"label": "address",
"numberOfBytes": "20"
},
"t_array(t_uint256)44_storage": {
"label": "uint256[44]",
"numberOfBytes": "1408"
},
"t_array(t_uint256)49_storage": {
"label": "uint256[49]",
"numberOfBytes": "1568"
},
"t_array(t_uint256)50_storage": {
"label": "uint256[50]",
"numberOfBytes": "1600"
},
"t_bool": {
"label": "bool",
"numberOfBytes": "1"
},
"t_enum(AccessPointCreationStatus)4970": {
"label": "enum FleekAccessPoints.AccessPointCreationStatus",
"members": [
"DRAFT",
"APPROVED",
"REJECTED",
"REMOVED"
],
"numberOfBytes": "1"
},
"t_enum(Billing)5630": {
"label": "enum FleekBilling.Billing",
"members": [
"Mint",
"AddAccessPoint"
],
"numberOfBytes": "1"
},
"t_enum(CollectionRoles)4485": {
"label": "enum FleekAccessControl.CollectionRoles",
"members": [
"Owner",
"Verifier"
],
"numberOfBytes": "1"
},
"t_enum(TokenRoles)4487": {
"label": "enum FleekAccessControl.TokenRoles",
"members": [
"Controller"
],
"numberOfBytes": "1"
},
"t_mapping(t_address,t_bool)": {
"label": "mapping(address => bool)",
"numberOfBytes": "32"
},
"t_mapping(t_address,t_mapping(t_address,t_bool))": {
"label": "mapping(address => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_address,t_uint256)": {
"label": "mapping(address => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(Billing)5630,t_uint256)": {
"label": "mapping(enum FleekBilling.Billing => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(CollectionRoles)4485,t_mapping(t_address,t_bool))": {
"label": "mapping(enum FleekAccessControl.CollectionRoles => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_enum(CollectionRoles)4485,t_uint256)": {
"label": "mapping(enum FleekAccessControl.CollectionRoles => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool))": {
"label": "mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_string_memory_ptr,t_struct(AccessPoint)4984_storage)": {
"label": "mapping(string => struct FleekAccessPoints.AccessPoint)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_address)": {
"label": "mapping(uint256 => address)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_bool)": {
"label": "mapping(uint256 => bool)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool)))": {
"label": "mapping(uint256 => mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool)))",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool))))": {
"label": "mapping(uint256 => mapping(uint256 => mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool))))",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_struct(Build)7406_storage)": {
"label": "mapping(uint256 => struct IERCX.Build)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_struct(Token)7426_storage)": {
"label": "mapping(uint256 => struct IERCX.Token)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_uint256)": {
"label": "mapping(uint256 => uint256)",
"numberOfBytes": "32"
},
"t_string_memory_ptr": {
"label": "string",
"numberOfBytes": "32"
},
"t_string_storage": {
"label": "string",
"numberOfBytes": "32"
},
"t_struct(AccessPoint)4984_storage": {
"label": "struct FleekAccessPoints.AccessPoint",
"members": [
{
"label": "tokenId",
"type": "t_uint256",
"offset": 0,
"slot": "0"
},
{
"label": "score",
"type": "t_uint256",
"offset": 0,
"slot": "1"
},
{
"label": "contentVerified",
"type": "t_bool",
"offset": 0,
"slot": "2"
},
{
"label": "nameVerified",
"type": "t_bool",
"offset": 1,
"slot": "2"
},
{
"label": "owner",
"type": "t_address",
"offset": 2,
"slot": "2"
},
{
"label": "status",
"type": "t_enum(AccessPointCreationStatus)4970",
"offset": 22,
"slot": "2"
}
],
"numberOfBytes": "96"
},
"t_struct(Build)7406_storage": {
"label": "struct IERCX.Build",
"members": [
{
"label": "commitHash",
"type": "t_string_storage",
"offset": 0,
"slot": "0"
},
{
"label": "gitRepository",
"type": "t_string_storage",
"offset": 0,
"slot": "1"
},
{
"label": "ipfsHash",
"type": "t_string_storage",
"offset": 0,
"slot": "2"
},
{
"label": "domain",
"type": "t_string_storage",
"offset": 0,
"slot": "3"
}
],
"numberOfBytes": "128"
},
"t_struct(Token)7426_storage": {
"label": "struct IERCX.Token",
"members": [
{
"label": "name",
"type": "t_string_storage",
"offset": 0,
"slot": "0"
},
{
"label": "description",
"type": "t_string_storage",
"offset": 0,
"slot": "1"
},
{
"label": "externalURL",
"type": "t_string_storage",
"offset": 0,
"slot": "2"
},
{
"label": "ENS",
"type": "t_string_storage",
"offset": 0,
"slot": "3"
},
{
"label": "logo",
"type": "t_string_storage",
"offset": 0,
"slot": "4"
},
{
"label": "color",
"type": "t_uint24",
"offset": 0,
"slot": "5"
},
{
"label": "currentBuild",
"type": "t_uint256",
"offset": 0,
"slot": "6"
},
{
"label": "builds",
"type": "t_mapping(t_uint256,t_struct(Build)7406_storage)",
"offset": 0,
"slot": "7"
}
],
"numberOfBytes": "256"
},
"t_uint24": {
"label": "uint24",
"numberOfBytes": "3"
},
"t_uint256": {
"label": "uint256",
"numberOfBytes": "32"
},
"t_uint8": {
"label": "uint8",
"numberOfBytes": "1"
}
}
}
}
}
}

View File

@ -39,6 +39,11 @@
"address": "0x37150709cFf366DeEaB836d05CAf49F4DA46Bb2E",
"txHash": "0x808546aa8bbc4e36c54d955970d8cfe8c4dc925eb5f65ff7b25203dd312bad4c",
"kind": "transparent"
},
{
"address": "0xdAbc1E0f926545a2898c644870FA4DC39E83EB70",
"txHash": "0xfc68decd1ce3d80c8bb94ef0c715792ef0fc555e9b63b85945841349a5640918",
"kind": "transparent"
}
],
"impls": {
@ -7445,6 +7450,501 @@
}
}
}
},
"3f9c0fe2bffef556e080d82b03898e016c29a3ded5d8dc62eaad7963752823a7": {
"address": "0x5c456c91fe4ADd4440aAfCb856C66F105e8Ade83",
"txHash": "0x6ce06b676b454b622edac784f38d9c1acadd7f0927a51ae3ee765a099c9195cd",
"layout": {
"solcVersion": "0.8.12",
"storage": [
{
"label": "_initialized",
"offset": 0,
"slot": "0",
"type": "t_uint8",
"contract": "Initializable",
"src": "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol:62",
"retypedFrom": "bool"
},
{
"label": "_initializing",
"offset": 1,
"slot": "0",
"type": "t_bool",
"contract": "Initializable",
"src": "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol:67"
},
{
"label": "__gap",
"offset": 0,
"slot": "1",
"type": "t_array(t_uint256)50_storage",
"contract": "ContextUpgradeable",
"src": "@openzeppelin/contracts-upgradeable/utils/ContextUpgradeable.sol:36"
},
{
"label": "__gap",
"offset": 0,
"slot": "51",
"type": "t_array(t_uint256)50_storage",
"contract": "ERC165Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/utils/introspection/ERC165Upgradeable.sol:41"
},
{
"label": "_name",
"offset": 0,
"slot": "101",
"type": "t_string_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:25"
},
{
"label": "_symbol",
"offset": 0,
"slot": "102",
"type": "t_string_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:28"
},
{
"label": "_owners",
"offset": 0,
"slot": "103",
"type": "t_mapping(t_uint256,t_address)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:31"
},
{
"label": "_balances",
"offset": 0,
"slot": "104",
"type": "t_mapping(t_address,t_uint256)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:34"
},
{
"label": "_tokenApprovals",
"offset": 0,
"slot": "105",
"type": "t_mapping(t_uint256,t_address)",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:37"
},
{
"label": "_operatorApprovals",
"offset": 0,
"slot": "106",
"type": "t_mapping(t_address,t_mapping(t_address,t_bool))",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:40"
},
{
"label": "__gap",
"offset": 0,
"slot": "107",
"type": "t_array(t_uint256)44_storage",
"contract": "ERC721Upgradeable",
"src": "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol:514"
},
{
"label": "_collectionRolesCounter",
"offset": 0,
"slot": "151",
"type": "t_mapping(t_enum(CollectionRoles)4485,t_uint256)",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:58"
},
{
"label": "_collectionRoles",
"offset": 0,
"slot": "152",
"type": "t_mapping(t_enum(CollectionRoles)4485,t_mapping(t_address,t_bool))",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:63"
},
{
"label": "_tokenRolesVersion",
"offset": 0,
"slot": "153",
"type": "t_mapping(t_uint256,t_uint256)",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:70"
},
{
"label": "_tokenRoles",
"offset": 0,
"slot": "154",
"type": "t_mapping(t_uint256,t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool))))",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:75"
},
{
"label": "__gap",
"offset": 0,
"slot": "155",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekAccessControl",
"src": "contracts/FleekAccessControl.sol:178"
},
{
"label": "_paused",
"offset": 0,
"slot": "204",
"type": "t_bool",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:23"
},
{
"label": "_canPause",
"offset": 1,
"slot": "204",
"type": "t_bool",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:24"
},
{
"label": "__gap",
"offset": 0,
"slot": "205",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekPausable",
"src": "contracts/FleekPausable.sol:133"
},
{
"label": "_billings",
"offset": 0,
"slot": "254",
"type": "t_mapping(t_enum(Billing)5452,t_uint256)",
"contract": "FleekBilling",
"src": "contracts/FleekBilling.sol:31"
},
{
"label": "__gap",
"offset": 0,
"slot": "255",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekBilling",
"src": "contracts/FleekBilling.sol:81"
},
{
"label": "_accessPoints",
"offset": 0,
"slot": "304",
"type": "t_mapping(t_string_memory_ptr,t_struct(AccessPoint)4984_storage)",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:64"
},
{
"label": "_autoApproval",
"offset": 0,
"slot": "305",
"type": "t_mapping(t_uint256,t_bool)",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:66"
},
{
"label": "__gap",
"offset": 0,
"slot": "306",
"type": "t_array(t_uint256)49_storage",
"contract": "FleekAccessPoints",
"src": "contracts/FleekAccessPoints.sol:211"
},
{
"label": "_appIds",
"offset": 0,
"slot": "355",
"type": "t_uint256",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:53"
},
{
"label": "_apps",
"offset": 0,
"slot": "356",
"type": "t_mapping(t_uint256,t_struct(Token)7203_storage)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:54"
},
{
"label": "_tokenVerifier",
"offset": 0,
"slot": "357",
"type": "t_mapping(t_uint256,t_address)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:55"
},
{
"label": "_tokenVerified",
"offset": 0,
"slot": "358",
"type": "t_mapping(t_uint256,t_bool)",
"contract": "FleekERC721",
"src": "contracts/FleekERC721.sol:56"
}
],
"types": {
"t_address": {
"label": "address",
"numberOfBytes": "20"
},
"t_array(t_uint256)44_storage": {
"label": "uint256[44]",
"numberOfBytes": "1408"
},
"t_array(t_uint256)49_storage": {
"label": "uint256[49]",
"numberOfBytes": "1568"
},
"t_array(t_uint256)50_storage": {
"label": "uint256[50]",
"numberOfBytes": "1600"
},
"t_bool": {
"label": "bool",
"numberOfBytes": "1"
},
"t_enum(AccessPointCreationStatus)4970": {
"label": "enum FleekAccessPoints.AccessPointCreationStatus",
"members": [
"DRAFT",
"APPROVED",
"REJECTED",
"REMOVED"
],
"numberOfBytes": "1"
},
"t_enum(Billing)5452": {
"label": "enum FleekBilling.Billing",
"members": [
"Mint",
"AddAccessPoint"
],
"numberOfBytes": "1"
},
"t_enum(CollectionRoles)4485": {
"label": "enum FleekAccessControl.CollectionRoles",
"members": [
"Owner",
"Verifier"
],
"numberOfBytes": "1"
},
"t_enum(TokenRoles)4487": {
"label": "enum FleekAccessControl.TokenRoles",
"members": [
"Controller"
],
"numberOfBytes": "1"
},
"t_mapping(t_address,t_bool)": {
"label": "mapping(address => bool)",
"numberOfBytes": "32"
},
"t_mapping(t_address,t_mapping(t_address,t_bool))": {
"label": "mapping(address => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_address,t_uint256)": {
"label": "mapping(address => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(Billing)5452,t_uint256)": {
"label": "mapping(enum FleekBilling.Billing => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(CollectionRoles)4485,t_mapping(t_address,t_bool))": {
"label": "mapping(enum FleekAccessControl.CollectionRoles => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_enum(CollectionRoles)4485,t_uint256)": {
"label": "mapping(enum FleekAccessControl.CollectionRoles => uint256)",
"numberOfBytes": "32"
},
"t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool))": {
"label": "mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool))",
"numberOfBytes": "32"
},
"t_mapping(t_string_memory_ptr,t_struct(AccessPoint)4984_storage)": {
"label": "mapping(string => struct FleekAccessPoints.AccessPoint)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_address)": {
"label": "mapping(uint256 => address)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_bool)": {
"label": "mapping(uint256 => bool)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool)))": {
"label": "mapping(uint256 => mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool)))",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_mapping(t_uint256,t_mapping(t_enum(TokenRoles)4487,t_mapping(t_address,t_bool))))": {
"label": "mapping(uint256 => mapping(uint256 => mapping(enum FleekAccessControl.TokenRoles => mapping(address => bool))))",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_struct(Build)7183_storage)": {
"label": "mapping(uint256 => struct IERCX.Build)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_struct(Token)7203_storage)": {
"label": "mapping(uint256 => struct IERCX.Token)",
"numberOfBytes": "32"
},
"t_mapping(t_uint256,t_uint256)": {
"label": "mapping(uint256 => uint256)",
"numberOfBytes": "32"
},
"t_string_memory_ptr": {
"label": "string",
"numberOfBytes": "32"
},
"t_string_storage": {
"label": "string",
"numberOfBytes": "32"
},
"t_struct(AccessPoint)4984_storage": {
"label": "struct FleekAccessPoints.AccessPoint",
"members": [
{
"label": "tokenId",
"type": "t_uint256",
"offset": 0,
"slot": "0"
},
{
"label": "score",
"type": "t_uint256",
"offset": 0,
"slot": "1"
},
{
"label": "contentVerified",
"type": "t_bool",
"offset": 0,
"slot": "2"
},
{
"label": "nameVerified",
"type": "t_bool",
"offset": 1,
"slot": "2"
},
{
"label": "owner",
"type": "t_address",
"offset": 2,
"slot": "2"
},
{
"label": "status",
"type": "t_enum(AccessPointCreationStatus)4970",
"offset": 22,
"slot": "2"
}
],
"numberOfBytes": "96"
},
"t_struct(Build)7183_storage": {
"label": "struct IERCX.Build",
"members": [
{
"label": "commitHash",
"type": "t_string_storage",
"offset": 0,
"slot": "0"
},
{
"label": "gitRepository",
"type": "t_string_storage",
"offset": 0,
"slot": "1"
},
{
"label": "ipfsHash",
"type": "t_string_storage",
"offset": 0,
"slot": "2"
},
{
"label": "domain",
"type": "t_string_storage",
"offset": 0,
"slot": "3"
}
],
"numberOfBytes": "128"
},
"t_struct(Token)7203_storage": {
"label": "struct IERCX.Token",
"members": [
{
"label": "name",
"type": "t_string_storage",
"offset": 0,
"slot": "0"
},
{
"label": "description",
"type": "t_string_storage",
"offset": 0,
"slot": "1"
},
{
"label": "externalURL",
"type": "t_string_storage",
"offset": 0,
"slot": "2"
},
{
"label": "ENS",
"type": "t_string_storage",
"offset": 0,
"slot": "3"
},
{
"label": "logo",
"type": "t_string_storage",
"offset": 0,
"slot": "4"
},
{
"label": "color",
"type": "t_uint24",
"offset": 0,
"slot": "5"
},
{
"label": "currentBuild",
"type": "t_uint256",
"offset": 0,
"slot": "6"
},
{
"label": "builds",
"type": "t_mapping(t_uint256,t_struct(Build)7183_storage)",
"offset": 0,
"slot": "7"
}
],
"numberOfBytes": "256"
},
"t_uint24": {
"label": "uint24",
"numberOfBytes": "3"
},
"t_uint256": {
"label": "uint256",
"numberOfBytes": "32"
},
"t_uint8": {
"label": "uint8",
"numberOfBytes": "1"
}
}
}
}
}
}

View File

@ -0,0 +1,58 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.7;
import "@openzeppelin/contracts-upgradeable/token/ERC721/ERC721Upgradeable.sol";
import "@openzeppelin/contracts/utils/Base64.sol";
import "./util/FleekSVG.sol";
import "./FleekERC721.sol";
/**
 * @title FleekApps
 * @notice Upgradeable ERC-721 collection whose tokens ("prints") are bound to
 *         tokens of an existing FleekERC721 collection. Each print records in
 *         `bindings` the FleekERC721 tokenId it mirrors, and its metadata is
 *         rendered fully on-chain (base64 JSON with an SVG image) from that
 *         token's app data.
 * @dev NOTE(review): this contract declares no trailing `__gap` storage slot,
 *      unlike the other upgradeable Fleek contracts in this release — confirm
 *      whether future upgrades may need to append state here.
 */
contract FleekApps is Initializable, ERC721Upgradeable {
using Strings for address; // address -> hex string for the JSON "owner" field
using Base64 for bytes; // base64-encodes the JSON metadata payload
// Number of prints minted so far; doubles as the next print (bind) id.
uint256 public bindCount;
// Print id => FleekERC721 tokenId the print is bound to.
mapping(uint256 => uint256) public bindings;
// The FleekERC721 collection whose tokens prints are bound to.
FleekERC721 private main;
/// @custom:oz-upgrades-unsafe-allow constructor
constructor() {
_disableInitializers();
}
/**
 * @notice Initializes the collection name/symbol and the address of the
 *         FleekERC721 contract that prints are bound to.
 */
function initialize(string memory _name, string memory _symbol, address _mainAddress) public initializer {
__ERC721_init(_name, _symbol);
main = FleekERC721(_mainAddress);
}
// Reverts unless `_tokenId` exists on the main FleekERC721 collection.
// NOTE(review): OpenZeppelin's ERC721.ownerOf itself reverts for a
// nonexistent id, so the revert will originate there and this require's
// message is likely unreachable — confirm the intended error surface.
modifier _requireMainMinted(uint256 _tokenId) {
require(main.ownerOf(_tokenId) != address(0), "Main token does not exist");
_;
}
/**
 * @notice Mints a new print of main-collection token `_tokenId` to `_to`,
 *         using the current `bindCount` as the print id.
 * @dev NOTE(review): minting is unpermissioned — anyone may print any
 *      existing main token; presumably intentional, confirm.
 */
function mint(address _to, uint256 _tokenId) public _requireMainMinted(_tokenId) {
_mint(_to, bindCount);
bindings[bindCount] = _tokenId;
bindCount++;
}
/**
 * @notice Returns fully on-chain metadata for print `_bindId`: a
 *         `data:application/json;base64,` URI whose JSON carries the print
 *         owner, the bound app's name, a generated SVG image, and an
 *         `external_url` pointing at the app's IPFS hash.
 */
function tokenURI(uint256 _bindId) public view virtual override(ERC721Upgradeable) returns (string memory) {
(string memory name, string memory ens, string memory logo, string memory color, string memory ipfsHash) = main
.getAppData(bindings[_bindId]);
// prettier-ignore
return string(abi.encodePacked(_baseURI(),
abi.encodePacked('{',
'"owner":"', ownerOf(_bindId).toHexString(), '",',
'"name":"', name, '",',
'"image":"', FleekSVG.generateBase64(name, ens, logo, color), '",',
'"external_url":"ipfs://', ipfsHash, '"',
'}').encode()
));
}
// Data-URI prefix for the base64-encoded JSON emitted by tokenURI.
function _baseURI() internal view virtual override returns (string memory) {
return "data:application/json;base64,";
}
}

View File

@ -202,6 +202,15 @@ contract FleekERC721 is
return (app.name, app.description, app.externalURL, app.ENS, app.currentBuild, app.logo, app.color);
}
/**
 * @notice Returns the subset of an app's fields needed to render a bound
 *         print: name, ENS, logo, the color formatted as a string, and the
 *         IPFS hash of the current build.
 * @param tokenId The app token to read; reverts if it has not been minted.
 */
function getAppData(
uint256 tokenId
) public view returns (string memory, string memory, string memory, string memory, string memory) {
_requireMinted(tokenId);
Token storage app = _apps[tokenId];
return (app.name, app.ENS, app.logo, app.color.toColorString(), app.builds[app.currentBuild].ipfsHash);
}
/**
* @dev Returns the last minted tokenId.
*/

File diff suppressed because one or more lines are too long

View File

@ -1,5 +1,9 @@
{
"FleekERC721": [
{
"address": "0xce89f3853a8bE0b274b2aD6fa87F3f2b575164cE",
"timestamp": "6/15/2023, 9:03:37 AM"
},
{
"address": "0x8795608346Eb475E42e69F1281008AEAa522479D",
"timestamp": "3/17/2023, 3:01:30 PM"

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,5 +1,9 @@
{
"FleekERC721": [
{
"address": "0xdAbc1E0f926545a2898c644870FA4DC39E83EB70",
"timestamp": "5/17/2023, 5:27:20 PM"
},
{
"address": "0x37150709cFf366DeEaB836d05CAf49F4DA46Bb2E",
"timestamp": "3/3/2023, 4:43:25 PM"

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,8 @@
{
"FleekERC721": [
{
"address": "0x1CfD8455F189c56a4FBd81EB7D4118DB04616BA8",
"timestamp": "6/16/2023, 8:51:33 AM"
}
]
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,8 @@
{
"FleekERC721": [
{
"address": "0x40208b6aFfCc39CD42A25EC47B410Cfe117837D6",
"timestamp": "6/16/2023, 12:21:27 PM"
}
]
}

File diff suppressed because one or more lines are too long

View File

@ -9,7 +9,8 @@ import '@openzeppelin/hardhat-upgrades';
import * as dotenv from 'dotenv';
import { HardhatUserConfig } from 'hardhat/types';
import { task, types } from 'hardhat/config';
import deploy from './scripts/deploy';
import deployFleekERC721 from './scripts/deploy/deploy-fleek-erc721';
import deployFleekApps from './scripts/deploy/deploy-fleek-apps';
dotenv.config();
@ -17,16 +18,18 @@ const {
PRIVATE_KEY,
REPORT_GAS,
ETHERSCAN_API_KEY,
POLYGONSCAN_KEY,
POLYGON_API_URL,
ETH_MAIN_API_URL,
ETH_SEPOLIA_API_URL,
ETH_GOERLI_API_URL,
MAINNET_API_KEY,
COINMARKETCAP_KEY,
QANET_RPC_URL,
} = process.env;
const config: HardhatUserConfig = {
defaultNetwork: 'hardhat',
defaultNetwork: 'local',
networks: {
hardhat: {
chainId: 31337,
@ -57,6 +60,15 @@ const config: HardhatUserConfig = {
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
chainId: 1,
},
qanet: {
url: QANET_RPC_URL ? QANET_RPC_URL : '',
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
chainId: 31337,
},
local: {
url: 'http://localhost:8545',
accounts: PRIVATE_KEY ? [PRIVATE_KEY] : [],
},
},
gasReporter: {
enabled: REPORT_GAS === 'true' || false,
@ -86,10 +98,12 @@ const config: HardhatUserConfig = {
timeout: 200000, // 200 seconds max for running tests
},
etherscan: {
// apiKey: {
// polygonMumbai: POLYGONSCAN_KEY,
// },
apiKey: ETHERSCAN_API_KEY ? ETHERSCAN_API_KEY : '',
apiKey: {
polygonMumbai: POLYGONSCAN_KEY ? POLYGONSCAN_KEY : '',
mainnet: ETHERSCAN_API_KEY ? ETHERSCAN_API_KEY : '',
goerli: ETHERSCAN_API_KEY ? ETHERSCAN_API_KEY : '',
sepolia: ETHERSCAN_API_KEY ? ETHERSCAN_API_KEY : '',
},
},
};
@ -97,9 +111,9 @@ export default config;
// Use the following command to deploy where the network flag can be replaced with the network you choose:
// npx hardhat deploy --network goerli --new-proxy-instance --name "FleekNFAs" --symbol "FLKNFA" --billing "[10000, 20000]"
task('deploy', 'Deploy the contracts')
task('deploy:FleekERC721', 'Deploy the FleekERC721 contract')
.addFlag('newProxyInstance', 'Force to deploy a new proxy instance')
.addOptionalParam('name', 'The collection name', 'FleekNFAs', types.string)
.addOptionalParam('name', 'The collection name', 'Fleek NFAs', types.string)
.addOptionalParam('symbol', 'The collection symbol', 'FLKNFA', types.string)
.addOptionalParam(
'billing',
@ -107,4 +121,10 @@ task('deploy', 'Deploy the contracts')
[],
types.json
)
.setAction(deploy);
.setAction(deployFleekERC721);
task('deploy:FleekApps', 'Deploy the FleekApps contract')
.addFlag('newProxyInstance', 'Force to deploy a new proxy instance')
.addOptionalParam('name', 'The collection name', 'NFA - Apps', types.string)
.addOptionalParam('symbol', 'The collection symbol', 'NFAA', types.string)
.setAction(deployFleekApps);

View File

@ -6,13 +6,11 @@
"scripts": {
"test": "yarn test:hardhat && yarn test:foundry",
"test:foundry": "forge test -vvv --fork-url mainnet --fork-block-number 16876149",
"test:hardhat": "hardhat test",
"test:hardhat": "hardhat test --network hardhat",
"format": "prettier --write \"./**/*.{js,json,sol,ts}\"",
"node:hardhat": "hardhat node",
"deploy:hardhat": "hardhat deploy --network hardhat",
"deploy:mumbai": "hardhat deploy --network mumbai",
"deploy:sepolia": "hardhat deploy --network sepolia",
"deploy:goerli": "hardhat deploy --network goerli",
"deploy:FleekERC721": "hardhat deploy:FleekERC721",
"deploy:FleekApps": "hardhat deploy:FleekApps",
"compile": "hardhat compile",
"verify:mumbai": "npx hardhat run ./scripts/verify.js --network mumbai",
"verify:goerli": "npx hardhat run ./scripts/verify.js --network goerli",

View File

@ -1,105 +0,0 @@
const {
deployStore,
getCurrentAddressIfSameBytecode,
} = require('./utils/deploy-store');
const { getProxyAddress, proxyStore } = require('./utils/proxy-store');
// --- Script Settings ---
const CONTRACT_NAME = 'FleekERC721';
const DEFAULT_PROXY_SETTINGS = {
unsafeAllow: ['external-library-linking'],
};
const LIBRARIES_TO_DEPLOY = ['FleekSVG'];
const libraryDeployment = async (hre) => {
console.log('Deploying Libraries...');
const libraries = {};
for (const lib of LIBRARIES_TO_DEPLOY) {
const libAddress = await getCurrentAddressIfSameBytecode(lib);
if (libAddress) {
console.log(`Library "${lib}" already deployed at ${libAddress}`);
libraries[lib] = libAddress;
continue;
}
const libContract = await hre.ethers.getContractFactory(lib);
const libInstance = await libContract.deploy();
await libInstance.deployed();
await deployStore(hre.network.name, lib, libInstance, false);
console.log(`Library "${lib}" deployed at ${libInstance.address}`);
libraries[lib] = libInstance.address;
}
return libraries;
};
module.exports = async (taskArgs, hre) => {
const { newProxyInstance, name, symbol, billing } = taskArgs;
const network = hre.network.name;
console.log(':: Starting Deployment ::');
console.log('Network:', network);
console.log('Contract:', CONTRACT_NAME);
console.log(':: Arguments ::');
console.log(taskArgs);
console.log();
const deployArguments = [name, symbol, billing];
const libraries = await libraryDeployment(hre);
const Contract = await ethers.getContractFactory(CONTRACT_NAME, {
libraries,
});
const proxyAddress = await getProxyAddress(CONTRACT_NAME, network);
let deployResult;
try {
if (!proxyAddress || newProxyInstance)
throw new Error('new-proxy-instance');
console.log(`Trying to upgrade proxy contract at: "${proxyAddress}"`);
deployResult = await upgrades.upgradeProxy(
proxyAddress,
Contract,
DEFAULT_PROXY_SETTINGS
);
console.log('\x1b[32m');
console.log(
`Contract ${CONTRACT_NAME} upgraded at "${deployResult.address}" by account "${deployResult.signer.address}"`
);
console.log('\x1b[0m');
} catch (e) {
if (
e.message === 'new-proxy-instance' ||
e.message.includes("doesn't look like an ERC 1967 proxy")
) {
console.log(`Failed to upgrade proxy contract: "${e.message?.trim()}"`);
console.log('Creating new proxy contract...');
deployResult = await upgrades.deployProxy(
Contract,
deployArguments,
DEFAULT_PROXY_SETTINGS
);
await deployResult.deployed();
await proxyStore(CONTRACT_NAME, deployResult.address, network);
console.log('\x1b[32m');
console.log(
`Contract ${CONTRACT_NAME} deployed at "${deployResult.address}" by account "${deployResult.signer.address}"`
);
console.log('\x1b[0m');
} else {
throw e;
}
try {
await deployStore(network, CONTRACT_NAME, deployResult);
} catch (e) {
console.error('Could not write deploy files', e);
}
}
return deployResult;
};

View File

@ -0,0 +1,31 @@
import { HardhatRuntimeEnvironment } from 'hardhat/types';
import { deployLibraries } from './deploy-libraries';
import { deployContractWithProxy } from './deploy-proxy-contract';
import { getContract } from '../util';
import { Contract } from 'ethers';
/** Arguments accepted by the `deploy:FleekApps` hardhat task. */
type TaskArgs = {
  newProxyInstance: boolean;
  name: string;
  symbol: string;
};

/**
 * Deploys (or upgrades) the FleekApps contract behind a proxy.
 * The FleekSVG library is deployed (or reused) first, and the address of
 * the already-deployed FleekERC721 contract is passed to the initializer
 * together with the collection name and symbol.
 */
export default async (
  taskArgs: TaskArgs,
  hre: HardhatRuntimeEnvironment
): Promise<Contract> => {
  console.log('Deploying FleekApps...');
  const linkedLibraries = await deployLibraries(['FleekSVG'], hre);
  const erc721 = await getContract('FleekERC721');
  const deployment = {
    name: 'FleekApps',
    newProxyInstance: taskArgs.newProxyInstance,
    args: [taskArgs.name, taskArgs.symbol, erc721.address],
    libraries: linkedLibraries,
  };
  return deployContractWithProxy(deployment, hre);
};

View File

@ -0,0 +1,29 @@
import { HardhatRuntimeEnvironment } from 'hardhat/types';
import { deployLibraries } from './deploy-libraries';
import { deployContractWithProxy } from './deploy-proxy-contract';
import { Contract } from 'ethers';
/** Arguments accepted by the `deploy:FleekERC721` hardhat task. */
type TaskArgs = {
  newProxyInstance: boolean;
  name: string;
  symbol: string;
  billing: number[];
};

/**
 * Deploys (or upgrades) the FleekERC721 contract behind a proxy,
 * deploying (or reusing) the FleekSVG library first and linking it in.
 */
export default async (
  taskArgs: TaskArgs,
  hre: HardhatRuntimeEnvironment
): Promise<Contract> => {
  console.log('Deploying FleekERC721...');
  const linkedLibraries = await deployLibraries(['FleekSVG'], hre);
  return deployContractWithProxy(
    {
      name: 'FleekERC721',
      newProxyInstance: taskArgs.newProxyInstance,
      args: [taskArgs.name, taskArgs.symbol, taskArgs.billing],
      libraries: linkedLibraries,
    },
    hre
  );
};

View File

@ -0,0 +1,30 @@
import {
deployStore,
getCurrentAddressIfSameBytecode,
} from '../utils/deploy-store';
import { HardhatRuntimeEnvironment } from 'hardhat/types';
/**
 * Deploys each named library, reusing an existing deployment when the
 * stored bytecode matches the current artifact.
 *
 * @param librariesToDeploy - artifact names of the libraries to deploy
 * @param hre - hardhat runtime environment
 * @returns map of library name -> deployed address, suitable for linking
 */
export const deployLibraries = async (
  librariesToDeploy: string[],
  hre: HardhatRuntimeEnvironment
) => {
  console.log('Deploying Libraries...');
  const libraries: Record<string, string> = {};
  for (const libraryName of librariesToDeploy) {
    // Reuse the previously recorded deployment if its bytecode is unchanged.
    const existingAddress: string = await getCurrentAddressIfSameBytecode(
      libraryName
    );
    if (existingAddress) {
      console.log(
        `Library "${libraryName}" already deployed at ${existingAddress}`
      );
      libraries[libraryName] = existingAddress;
      continue;
    }
    // Fresh deployment: deploy, wait for confirmation, and record it.
    const factory = await hre.ethers.getContractFactory(libraryName);
    const deployed = await factory.deploy();
    await deployed.deployed();
    await deployStore(hre.network.name, libraryName, deployed, false);
    console.log(`Library "${libraryName}" deployed at ${deployed.address}`);
    libraries[libraryName] = deployed.address;
  }
  return libraries;
};

View File

@ -0,0 +1,88 @@
import { getProxyAddress, proxyStore } from '../utils/proxy-store';
import { deployStore } from '../utils/deploy-store';
import { UpgradeProxyOptions } from '@openzeppelin/hardhat-upgrades/dist/utils';
import { Contract } from 'ethers';
import { HardhatRuntimeEnvironment } from 'hardhat/types';
const DEFAULT_PROXY_SETTINGS: UpgradeProxyOptions = {
  unsafeAllow: ['external-library-linking'],
};

type DeployContractArgs = {
  name: string; // contract artifact name
  newProxyInstance: boolean; // force a fresh proxy instead of upgrading
  args: unknown[]; // initializer arguments for a fresh deployment
  libraries?: Record<string, string>; // linked library name -> address
};

/**
 * Deploys `name` behind an upgradeable proxy. If a proxy address is already
 * recorded for this network (and a new instance is not forced), the existing
 * proxy is upgraded in place; otherwise a new proxy is deployed and its
 * address recorded via proxyStore.
 *
 * @returns the proxy contract instance
 */
export const deployContractWithProxy = async (
  { name, newProxyInstance, args, libraries }: DeployContractArgs,
  hre: HardhatRuntimeEnvironment
): Promise<Contract> => {
  const network = hre.network.name;
  console.log(`Deploying: ${name}`);
  console.log('Arguments:', args);
  console.log();

  const Contract = await hre.ethers.getContractFactory(name, {
    libraries,
  });

  const proxyAddress = await getProxyAddress(name, network);

  let deployResult: Contract;
  try {
    // Sentinel error routes control to the fresh-deployment branch below.
    if (!proxyAddress || newProxyInstance)
      throw new Error('new-proxy-instance');

    console.log(`Trying to upgrade proxy contract at: "${proxyAddress}"`);
    deployResult = await hre.upgrades.upgradeProxy(
      proxyAddress,
      Contract,
      DEFAULT_PROXY_SETTINGS
    );
    console.log('\x1b[32m');
    console.log(
      `Contract ${name} upgraded at "${
        deployResult.address
      }" by account "${await deployResult.signer.getAddress()}"`
    );
    console.log('\x1b[0m');
  } catch (e) {
    if (
      e instanceof Error &&
      (e.message === 'new-proxy-instance' ||
        e.message.includes("doesn't look like an ERC 1967 proxy"))
    ) {
      console.log(`Failed to upgrade proxy contract: "${e.message?.trim()}"`);
      console.log('Creating new proxy contract...');

      deployResult = await hre.upgrades.deployProxy(
        Contract,
        args,
        DEFAULT_PROXY_SETTINGS
      );
      await deployResult.deployed();
      await proxyStore(name, deployResult.address, network);

      console.log('\x1b[32m');
      console.log(
        `Contract ${name} deployed at "${
          deployResult.address
        }" by account "${await deployResult.signer.getAddress()}"`
      );
      console.log('\x1b[0m');
    } else {
      throw e;
    }
  }

  // Record the deployment on BOTH the upgrade and fresh-deploy paths.
  // (Previously this lived inside the catch block, so successful upgrades
  // were never written to the deploy store.)
  try {
    await deployStore(network, name, deployResult);
  } catch (e) {
    console.error('Could not write deploy files', e);
  }

  return deployResult;
};

View File

@ -0,0 +1,18 @@
// npx hardhat run scripts/generate-image.ts --network local
import { getContract } from './util';
/**
 * Renders the on-chain SVG for the given token metadata via the deployed
 * FleekSVG library and logs the resulting base64 data URI.
 *
 * @param name  - token/app name rendered into the SVG
 * @param ens   - ENS name rendered into the SVG (may be empty)
 * @param logo  - logo markup/content rendered into the SVG (may be empty)
 * @param color - hex color string, e.g. '#123456'
 */
export const generateImage = async (
  name: string,
  ens: string,
  logo: string,
  color: string
) => {
  const contract = await getContract('FleekSVG');
  const svg = await contract.generateBase64(name, ens, logo, color);
  console.log('SVG:', svg);
};

// Surface failures instead of dying with an unhandled promise rejection.
generateImage('Fleek', '', '', '#123456').catch(console.error);

View File

@ -0,0 +1,15 @@
// npx hardhat run scripts/get-app.ts --network local
import { getContract, parseDataURI } from './util';
/**
 * Fetches the tokenURI for `tokenId` from the deployed FleekApps contract
 * and logs the decoded JSON metadata.
 */
const getApp = async (tokenId: number) => {
  const contract = await getContract('FleekApps');
  const transaction = await contract.tokenURI(tokenId);
  const parsed = parseDataURI(transaction);
  console.log('App:', parsed);
};

// Surface failures instead of dying with an unhandled promise rejection.
getApp(0).catch(console.error);

View File

@ -0,0 +1,16 @@
// npx hardhat run scripts/mint-app.ts --network local
import { getContract } from './util';
/**
 * Mints a print of NFA `nfaId` from the deployed FleekApps contract to the
 * hard-coded recipient address, then logs the transaction hash.
 */
const mintApp = async (nfaId: number) => {
  const contract = await getContract('FleekApps');
  const transaction = await contract.mint(
    '0x7ed735b7095c05d78df169f991f2b7f1a1f1a049',
    nfaId
  );
  console.log('Minted app', transaction.hash);
};

// Surface failures instead of dying with an unhandled promise rejection.
mintApp(0).catch(console.error);

View File

@ -44,8 +44,9 @@ const DEFAULT_MINTS = {
'aave', // name
'Earn interest, borrow assets, and build applications', // description
'https://aave.com/', // external url
'aave.eth', // ens
'6ea6ad16c46ae85faced7e50555ff7368422f57', // commit hash
'', // ens
'6ea6ad16c46ae85faced7e50555ff7368422f57', // commit hash,
'bafybeifc5pgon43a2xoeevwq45ftwghzbgtjxc7k4dqlzhqh432wpahigm', // ipfs hash
'https://github.com/org/repo', // repo
path.resolve(__dirname, '../assets/aave.svg'), // svg
],
@ -53,9 +54,10 @@ const DEFAULT_MINTS = {
'Uniswap', // name
'Swap, earn, and build on the leading decentralized crypto trading protocol', // description
'https://uniswap.org/', // external url
'uniswap.eth', // ens
'', // ens
'6ea6ad16c46ae85faced7e50555ff7368422f57', // commit hash
'https://github.com/org/repo', // repo
'bafybeidwf6m2lhkdifuxqucgaq547bwyxk2mljwmazvhmyryjr6yjoe3nu', // ipfs hash
path.resolve(__dirname, '../assets/uniswap.svg'), // svg
],
yearn: [
@ -78,7 +80,7 @@ const DEFAULT_MINTS = {
],
};
const params = DEFAULT_MINTS.fleek;
const params = DEFAULT_MINTS.uniswap;
const mintTo = '0x7ED735b7095C05d78dF169F991f2b7f1A1F1A049';
const verifier = '0x7ED735b7095C05d78dF169F991f2b7f1A1F1A049';
@ -90,11 +92,15 @@ const verifier = '0x7ED735b7095C05d78dF169F991f2b7f1A1F1A049';
console.log('SVG Path: ', svgPath);
params.push(await getSVGBase64(svgPath));
console.log('SVG length: ', params[params.length - 1].length);
params.push(await getSVGColor(svgPath));
params.push(
(await getSVGColor(svgPath))
.reduce((a, b, i) => a | (b << ((2 - i) * 8)), 0)
.toString()
);
params.push(false);
params.push(verifier);
const transaction = await contract.mint(...params);
console.log('Response: ', transaction);
console.log('Response: ', transaction.hash);
})();

View File

@ -0,0 +1,13 @@
// npx hardhat run scripts/owner-of.ts --network local
import { getContract } from './util';
/** Looks up and logs the owner of FleekERC721 token `tokenId`. */
const ownerOf = async (tokenId: number) => {
  const contract = await getContract('FleekERC721');
  const owner = await contract.ownerOf(tokenId);
  console.log('Owner:', owner);
};

// Surface failures instead of dying with an unhandled promise rejection.
ownerOf(0).catch(console.error);

View File

@ -1,5 +1,5 @@
// npx hardhat run scripts/tokenURI.js --network mumbai/sepolia/goerli
const { getContract } = require('./util');
const { getContract, parseDataURI } = require('./util');
// TODO: make this arguments
const tokenId = 0;
@ -9,9 +9,7 @@ const tokenId = 0;
const transaction = await contract.tokenURI(tokenId);
const parsed = JSON.parse(
Buffer.from(transaction.slice(29), 'base64').toString('utf-8')
);
const parsed = parseDataURI(transaction);
console.log('Response: ', parsed);
})();

View File

@ -1,14 +1,26 @@
module.exports.getContract = async function (contractName) {
const proxyDeployments =
require(`../deployments/${hre.network.name}/proxy.json`)[contractName];
const deployment = require(`../deployments/${hre.network.name}/${contractName}.json`);
if (!proxyDeployments || !proxyDeployments.length) {
if (!deployment) {
throw new Error(
`No proxy deployments found for "${contractName}" under "${hre.network.name}"`
`No deployment found for "${contractName}" under "${hre.network.name}"`
);
}
const latestDeployment = proxyDeployments[0];
console.log(`Using latest deployment for "${deployment.address}":`);
return hre.ethers.getContractAt(contractName, latestDeployment.address);
return hre.ethers.getContractAt(contractName, deployment.address);
};
module.exports.parseDataURI = function (dataURI) {
if (!dataURI.startsWith('data:')) throw new Error('Invalid data URI');
const content = dataURI.replace('data:', '');
const [type, data] = content.split(';base64,');
switch (type) {
case 'application/json':
return JSON.parse(Buffer.from(data, 'base64').toString('utf-8'));
default:
throw new Error(`Unsupported data URI type: ${type}`);
}
};

View File

@ -81,7 +81,20 @@ const getCurrentAddressIfSameBytecode = async (contractName) => {
hre.network.name,
contractName
));
return deployData.bytecode === bytecode ? deployData.address : null;
if (deployData.bytecode === bytecode) {
try {
const contract = await hre.ethers.getContractAt(
contractName,
deployData.address
);
return contract.address;
} catch {
console.log(
`Contract ${contractName} at ${deployData.address} is not deployed`
);
}
}
}
return null;

View File

@ -2,11 +2,11 @@ import { expect } from 'chai';
import * as hre from 'hardhat';
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
import deploy from '../../../scripts/deploy';
import deploy from '../../../scripts/deploy/deploy-fleek-erc721';
import { getImplementationAddress } from '@openzeppelin/upgrades-core';
import { Contract } from 'ethers';
import { loadFixture } from '@nomicfoundation/hardhat-network-helpers';
import { Errors, TestConstants } from '../contracts/FleekERC721/helpers';
import { TestConstants } from '../contracts/FleekERC721/helpers';
const taskArgs = {
newProxyInstance: false,
@ -28,7 +28,7 @@ const getImplementationContract = async (
const deployFixture = async () => {
const [owner] = await hre.ethers.getSigners();
const proxy = (await deploy(taskArgs, hre)) as Contract;
const proxy = await deploy(taskArgs, hre);
const implementation = await getImplementationContract(proxy.address);

View File

@ -1,7 +1,5 @@
# Environment variables declared in this file are automatically made available to Prisma.
# See the documentation for more detail: https://pris.ly/d/prisma-schema#accessing-environment-variables-from-the-schema
# Prisma supports the native connection string format for PostgreSQL, MySQL, SQLite, SQL Server, MongoDB and CockroachDB.
# See the documentation for all the connection string options: https://pris.ly/d/connection-strings
DATABASE_URL="mongodb+srv://root:randompassword@cluster0.ab1cd.mongodb.net/mydb?retryWrites=true&w=majority"
DATABASE_URL="mongodb+srv://root:randompassword@cluster0.ab1cd.mongodb.net/mydb?retryWrites=true&w=majority"
BUNNY_CDN_ACCESS_KEY=""
CONTRACT_ADDRESS=""
PRIVATE_KEY=""
JSON_RPC=""

View File

@ -12,3 +12,6 @@ yarn-error.log
# output
dist
# Lambda Layers
lambda-layers-*

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.7 MiB

View File

@ -5,10 +5,11 @@
"main": "index.js",
"scripts": {
"build": "yarn tsc",
"invoke:build": "yarn build && serverless invoke local --function submitBuildInfo",
"prisma:generate": "npx prisma generate",
"prisma:pull": "npx prisma db pull --force",
"start": "serverless offline"
"start": "yarn build && serverless offline",
"deploy:dev": "sh ./scripts/deploy.sh dev",
"deploy:prd": "sh ./scripts/deploy.sh prd"
},
"author": "fleek",
"license": "MIT",
@ -20,6 +21,7 @@
"esbuild": "^0.17.12",
"json-schema-to-ts": "^2.7.2",
"serverless": "^3.28.1",
"serverless-dotenv-plugin": "^6.0.0",
"serverless-esbuild": "^1.42.0",
"serverless-offline": "^12.0.4",
"ts-node": "^10.9.1",
@ -33,7 +35,7 @@
"@prisma/client": "^4.13.0",
"@types/node": "^18.15.11",
"aws-sdk": "^2.1342.0",
"dotenv": "^16.0.3",
"ethers": "5.7.2",
"prisma": "^4.13.0",
"ts-node": "^10.9.1",
"typescript": "^5.0.4",

View File

@ -1,5 +1,6 @@
generator client {
provider = "prisma-client-js"
binaryTargets = ["native", "rhel-openssl-1.0.x"]
}
datasource db {
@ -25,3 +26,11 @@ model tokens {
tokenId Int
verified Boolean
}
model zones {
id String @id @default(auto()) @map("_id") @db.ObjectId
zoneId Int // The returned id from the creation call
name String // The assigned name at the time of creation
hostname String // The target domain that's assigned as hostname
sourceDomain String // The origin URL
}

120
serverless/scripts/deploy.sh Executable file
View File

@ -0,0 +1,120 @@
#!/bin/bash
# Deploys the serverless stack to AWS.
# Usage: deploy.sh [stage] | deploy.sh --stage <stage>   (default stage: dev)

bold=$(tput bold)
normal=$(tput sgr0)

source .env

echo "${bold}Starting the deployment process${normal}"

# Default value for the stage variable
stage="dev"

# Parse command line arguments. Both "--stage <value>" and a bare positional
# value are accepted; the positional form is what the package.json
# "deploy:dev" / "deploy:prd" scripts pass (previously it was silently
# ignored and every deploy used the default stage).
while [[ $# -gt 0 ]]; do
  key="$1"
  case $key in
    --stage)
      shift
      stage="$1"
      ;;
    *)
      # Treat a bare argument as the stage value (e.g. "deploy.sh prd").
      stage="$1"
      ;;
  esac
  shift
done

echo "Passed stage value: $stage"

echo "${bold}Installing dependencies via Yarn${normal}"
yarn

# Prompt for any required secrets that were not already provided via .env
# or the shell environment.
if [[ -z "${DATABASE_URL}" ]]; then
  printf "%s" "Enter the Mongo Database URL: "
  read -r DB_URL
  export DATABASE_URL=$DB_URL
fi

if [[ -z "${JSON_RPC}" ]]; then
  printf "%s" "Enter the JSON RPC endpoint: "
  read -r JSON_RPC
  export JSON_RPC=$JSON_RPC
fi

if [[ -z "${CONTRACT_ADDRESS}" ]]; then
  printf "%s" "Enter the contract address: "
  read -r CONTRACT_ADDRESS
  export CONTRACT_ADDRESS=$CONTRACT_ADDRESS
fi

if [[ -z "${PRIVATE_KEY}" ]]; then
  printf "%s" "Enter the private key: "
  read -r PRIVATE_KEY
  export PRIVATE_KEY=$PRIVATE_KEY
fi

if [[ -z "${AWS_ACCESS_KEY_ID}" ]]; then
  printf "%s" "Enter the AWS access key ID: "
  read -r AWS_ACCESS_KEY_ID
  export AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
fi

if [[ -z "${AWS_SECRET_ACCESS_KEY}" ]]; then
  printf "%s" "Enter the AWS secret access key: "
  read -r AWS_SECRET_ACCESS_KEY
  export AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
fi

echo "${bold}Copying the Prisma schema file to function directories${normal}"
cp prisma/schema.prisma dist/src/functions/builds/
cp prisma/schema.prisma dist/src/functions/mints/

echo "${bold}Generating Prisma Client${normal}"
yarn prisma:generate

echo "${bold}Running the build command${normal}"
yarn build

echo "${bold}Copying the rhel openssl engine to dist/${normal}"
cp node_modules/.prisma/client/libquery_engine-rhel-openssl-1.0.x.so.node dist/src/functions/mints
cp node_modules/.prisma/client/libquery_engine-rhel-openssl-1.0.x.so.node dist/src/functions/builds
cp node_modules/.prisma/client/libquery_engine-rhel-openssl-1.0.x.so.node dist/src/functions/apps

echo "${bold}Copying the .env file to dist/${normal}"
cp .env src/

echo "${bold}Copying the FleekERC721.json file to dist/serverless/src/libs${normal}"
cp src/libs/FleekERC721.json dist/src/libs/

# Copy the schema again now that the build has (re)created dist/, this time
# including the apps function directory as well.
echo "${bold}Copying the Prisma schema file to function directories${normal}"
cp prisma/schema.prisma dist/src/functions/builds/
cp prisma/schema.prisma dist/src/functions/mints/
cp prisma/schema.prisma dist/src/functions/apps/

echo "${bold}Creating layer zip files${normal}"
/bin/bash ./scripts/prepare-libs-lambda-layer.sh
/bin/bash ./scripts/prepare-prisma-client-lambda-layer.sh
/bin/bash ./scripts/prepare-node-modules-lambda-layer.sh

echo "${bold}Deploying to AWS lambda${normal}"
yarn sls deploy --stage "$stage" --verbose

# TODO the .env file needs to move to the inside of the serverless dir in dist (zip file)
# TODO Prisma.schema files aren't being packaged with the functions in the nfa-serverless.zip file in .serverless.

View File

@ -0,0 +1,23 @@
#!/bin/bash
# Builds the "libs" lambda layer: packages the compiled @libs helpers under a
# nodejs/node_modules/@libs directory (the layout AWS Lambda layers expect for
# Node.js) and compresses the result into nodejs.tar.gz.

function prepare_libs_lambda_layer() {
  # Start from a clean layer directory so stale files never ship.
  echo "Cleaning up ..."
  rm -rf lambda-layers-libs

  echo "Creating layer ..."
  mkdir -p lambda-layers-libs/nodejs/node_modules/@libs

  # Copy the built helper modules out of dist/ into the layer tree.
  echo "Prepare libs lambda layer ..."
  cp -r dist/serverless/src/libs/* lambda-layers-libs/nodejs/node_modules/@libs/

  # Archive via /tmp to avoid tar reading its own output file.
  echo "Compressing ..."
  pushd lambda-layers-libs && tar -zcf /tmp/nodejs.tar.gz . && mv /tmp/nodejs.tar.gz ./nodejs.tar.gz

  # Keep only the archive; drop the uncompressed tree.
  echo "Remove unzipped files ..."
  rm -rf nodejs

  echo "Stats:"
  ls -lh nodejs.tar.gz
  popd
}

prepare_libs_lambda_layer

View File

@ -0,0 +1,27 @@
#!/bin/bash
# Builds the node_modules lambda layer: copies the project's node_modules into
# the nodejs/ layout AWS Lambda layers expect, strips the Prisma packages
# (those ship in their own dedicated layer), and compresses the result.

function prepare_node_modules_lambda_layer() {
  # Start from a clean layer directory so stale files never ship.
  echo "Cleaning up workspace ..."
  rm -rf lambda-layers-node_modules

  echo "Creating layer ..."
  mkdir -p lambda-layers-node_modules/nodejs

  echo "Prepare server node_modules lambda layer ..."
  cp -r node_modules lambda-layers-node_modules/nodejs

  # Prisma is delivered via the separate prisma-client layer; remove it here
  # to keep this layer within Lambda size limits.
  echo "Remove Prisma..."
  rm -rf lambda-layers-node_modules/nodejs/node_modules/@prisma
  rm -rf lambda-layers-node_modules/nodejs/node_modules/.prisma

  # Archive via /tmp to avoid tar reading its own output file.
  echo "Compressing ..."
  pushd lambda-layers-node_modules && tar -zcf /tmp/nodejs.tar.gz . && mv /tmp/nodejs.tar.gz ./nodejs.tar.gz

  # Keep only the archive; drop the uncompressed tree.
  echo "Remove unzipped files ..."
  rm -rf nodejs

  echo "Stats:"
  ls -lh nodejs.tar.gz
  popd
}

prepare_node_modules_lambda_layer

View File

@ -0,0 +1,28 @@
#!/bin/bash
# Builds the Prisma client lambda layer: copies the generated .prisma client
# and the @prisma runtime packages into the nodejs/ layout AWS Lambda layers
# expect, drops the CLI, and compresses the result.

function prepare_prisma_client_lambda_layer() {
  # Start from a clean layer directory so stale files never ship.
  echo "Cleaning up workspace ..."
  rm -rf lambda-layers-prisma-client

  echo "Creating layer ..."
  mkdir -p lambda-layers-prisma-client/nodejs/node_modules/.prisma
  mkdir -p lambda-layers-prisma-client/nodejs/node_modules/@prisma

  echo "Prepare Prisma Client lambda layer ..."
  cp -r node_modules/.prisma/client lambda-layers-prisma-client/nodejs/node_modules/.prisma
  cp -r node_modules/@prisma lambda-layers-prisma-client/nodejs/node_modules

  # The CLI is only needed at build time; excluding it keeps the layer small.
  echo "Remove Prisma CLI..."
  rm -rf lambda-layers-prisma-client/nodejs/node_modules/@prisma/cli

  # Archive via /tmp to avoid tar reading its own output file.
  echo "Compressing ..."
  pushd lambda-layers-prisma-client && tar -zcf /tmp/nodejs.tar.gz . && mv /tmp/nodejs.tar.gz ./nodejs.tar.gz

  # Keep only the archive; drop the uncompressed tree.
  echo "Remove unzipped files ..."
  rm -rf nodejs

  echo "Stats:"
  ls -lh nodejs.tar.gz
  popd
}

prepare_prisma_client_lambda_layer

View File

@ -6,6 +6,7 @@ plugins:
- serverless-offline
provider:
timeout: 150
name: aws
runtime: nodejs18.x
stage: ${opt:stage, 'prd'}
@ -18,6 +19,18 @@ provider:
AWS_STAGE: ${self:provider.stage}
AWS_NODEJS_CONNECTION_REUSE_ENABLED: 1
layers:
TopicAwsNodeModules:
path: lambda-layers-node_modules
TopicAwsLibs:
path: lambda-layers-libs
TopicPrismaAwsPrismaClient:
path: lambda-layers-prisma-client
package: {
patterns: ["dist/**/*.prisma", "**/libquery_engine-rhel-openssl-1.0.x.so.node", "dist/serverless/.env", '!node_modules/**'],
}
custom:
esbuild:
bundle: true
@ -30,28 +43,61 @@ custom:
concurrency: 10
functions:
submitBuildInfo:
handler: src/functions/builds/handler.submitBuildInfo # Change `src` to `dist` for deployment
# Deployment:
handler: ./dist/src/functions/builds/handler.submitBuildInfo # TODO This will not work, need to change to nfa-serverless/dist/serverless/src/functions/builds/handler.submitBuildInfo
# Local development:
# handler: ./src/functions/builds/handler.submitBuildInfo
events:
- http:
path: build
method: post
cors: true
request:
parameters:
querystrings:
githubOrg: true
githubRepo: true
commitHash: true
author: true
timestamp: true
ipfsHash: true
tokenId: true
environment: # TODO They won't be loaded from the shell environment, need to find a way to pass them from the deployment script
NODE_ENV: production
# DATABASE_URL: ${env:DATABASE_URL}
# CONTRACT_ADDRESS: ${env:CONTRACT_ADDRESS}
# PRIVATE_KEY: ${env:PRIVATE_KEY}
# JSON_RPC: ${env:JSON_RPC}
layers:
- { Ref: TopicAwsNodeModulesLambdaLayer }
- { Ref: TopicAwsLibsLambdaLayer }
- { Ref: TopicPrismaAwsPrismaClientLambdaLayer }
submitMintInfo:
handler: src/functions/mints/handler.submitMintInfo
# Deployment:
handler: ./dist/src/functions/mints/handler.submitMintInfo # TODO This will not work, need to change to nfa-serverless/dist/serverless/src/functions/mints/handler.submitMintInfo
# Local development:
# handler: ./src/functions/mints/handler.submitMintInfo
events:
- http:
path: mint
method: post
cors: true
cors: true
environment: # TODO They won't be loaded from the shell environment, need to find a way to pass them from the deployment script
NODE_ENV: production
# DATABASE_URL: ${env:DATABASE_URL}
# CONTRACT_ADDRESS: ${env:CONTRACT_ADDRESS}
# PRIVATE_KEY: ${env:PRIVATE_KEY}
# JSON_RPC: ${env:JSON_RPC}
layers:
- { Ref: TopicAwsNodeModulesLambdaLayer }
- { Ref: TopicAwsLibsLambdaLayer }
- { Ref: TopicPrismaAwsPrismaClientLambdaLayer }
verifyAccessPoint:
handler: ./dist/src/functions/apps/handler.verifyApp
events:
- http:
path: verifyApp
method: post
cors: true
submitAppInfo:
handler: ./dist/src/functions/apps/handler.submitAppInfo
events:
- http:
path: app
method: post
cors: true

View File

@ -0,0 +1,183 @@
import { APIGatewayProxyResult, APIGatewayEvent } from 'aws-lambda';
import { formatJSONResponse } from '@libs/api-gateway';
import * as dotenv from 'dotenv';
import { v4 } from 'uuid';
import { prisma } from '@libs/prisma';
import {
BunnyCdn,
BunnyCdnError,
CreatePullZoneMethodArgs,
LoadFreeCertificateMethodArgs,
} from '@libs/bunnyCDN';
import { isTheSignatureValid } from '@libs/verify-signature';
/**
 * Requests a free TLS certificate from Bunny CDN for the hostname supplied
 * in the request body. The request must carry a `lambda-signature` header
 * signed with FE_SIGNING_KEY.
 */
export const verifyApp = async (
  event: APIGatewayEvent
): Promise<APIGatewayProxyResult> => {
  try {
    dotenv.config();

    // Reject requests missing a body or the Bunny CDN server credential.
    if (event.body === null || process.env.BUNNY_CDN_ACCESS_KEY === undefined) {
      return formatJSONResponse({
        status: 422,
        message: 'Required parameters were not passed.',
      });
    }

    // Authenticate the caller via the lambda-signature header.
    const signature = event.headers['lambda-signature'];
    if (signature === undefined)
      throw Error("Header field 'lambda-signature' was not found.");
    if (process.env.FE_SIGNING_KEY === undefined)
      throw Error('FE_SIGNING_KEY env variable not found.');
    if (!isTheSignatureValid(event.body, signature, process.env.FE_SIGNING_KEY)) {
      return formatJSONResponse({
        status: 401,
        message: 'Unauthorized',
      });
    }

    // Ask Bunny CDN to provision a free certificate for the hostname.
    const bunnyCdn = new BunnyCdn(process.env.BUNNY_CDN_ACCESS_KEY);
    const { hostname } = JSON.parse(event.body);
    const certificateArgs: LoadFreeCertificateMethodArgs = { hostname };
    await bunnyCdn.loadFreeCertificate(certificateArgs);

    return formatJSONResponse({
      status: true,
    });
  } catch (e) {
    return formatJSONResponse({
      status: 500,
      message: e,
    });
  }
};
/**
 * Creates a Bunny CDN pull zone and custom hostname for an app, then records
 * the zone in the database (if not already recorded). The request must carry
 * a `lambda-signature` header signed with FE_SIGNING_KEY.
 *
 * Expects a JSON body with `sourceDomain` (origin URL) and `targetDomain`
 * (hostname to attach to the pull zone).
 */
export const submitAppInfo = async (
  event: APIGatewayEvent
): Promise<APIGatewayProxyResult> => {
  try {
    // Check the parameters and environment variables
    dotenv.config();
    if (event.body === null || process.env.BUNNY_CDN_ACCESS_KEY === undefined) {
      return formatJSONResponse({
        status: 422,
        message: 'Required parameters were not passed.',
      });
    }

    // Check the lambda-signature and confirm the value of the FE_SIGNING_KEY env variable.
    // If both are valid, verify the authenticity of the request.
    if (event.headers['lambda-signature'] === undefined)
      throw Error("Header field 'lambda-signature' was not found.");
    if (process.env.FE_SIGNING_KEY === undefined)
      throw Error('FE_SIGNING_KEY env variable not found.');
    else if (
      !isTheSignatureValid(
        event.body,
        event.headers['lambda-signature'],
        process.env.FE_SIGNING_KEY
      )
    ) {
      return formatJSONResponse({
        status: 401,
        message: 'Unauthorized',
      });
    }

    // Set up constants
    const bunnyCdn = new BunnyCdn(process.env.BUNNY_CDN_ACCESS_KEY);
    const data = JSON.parse(event.body);
    const appInfo = {
      apId: 'null',
      createdAt: new Date().toISOString(),
      sourceDomain: data.sourceDomain,
      hostname: data.targetDomain,
    };

    // Create the pull zone, retrying with a fresh random name when the
    // generated name is already taken. (Bug fix: the original loop never
    // exited after a successful creation — we now break on success — and
    // could fall through with an undefined pullZone.)
    let maxTries = 5;
    let pullZone:
      | {
          id: any;
          name?: string;
          originUrl?: string;
          hostname?: string;
        }
      | undefined;
    while (maxTries > 0) {
      const id = v4();
      const requestArgs: CreatePullZoneMethodArgs = {
        zoneId: id, // this is technically the zone name. It should be unique.
        originUrl: appInfo.sourceDomain,
      };
      try {
        pullZone = await bunnyCdn.createPullZone(requestArgs);
        appInfo.apId = id;
        break; // success — stop retrying
      } catch (error) {
        maxTries -= 1;
        const nameTaken =
          error instanceof BunnyCdnError &&
          error.name === 'pullzone.name_taken';
        if (nameTaken && maxTries > 0) {
          continue; // name collision — retry with a new uuid
        }
        if (maxTries === 0) {
          // Throw a proper Error instead of a bare string.
          throw new Error(
            'Max number of tries for creating pullzone was reached.'
          );
        }
        throw error;
      }
    }
    if (pullZone === undefined) {
      throw new Error('Max number of tries for creating pullzone was reached.');
    }

    // Create custom hostname
    await bunnyCdn.addCustomHostname({
      pullZoneId: pullZone.id,
      hostname: appInfo.hostname,
    });

    // Add record to the database, if it's not been already added
    const zoneRecord = await prisma.zones.findMany({
      where: {
        zoneId: pullZone.id,
        name: appInfo.apId,
        sourceDomain: appInfo.sourceDomain,
      },
    });
    if (zoneRecord.length === 0) {
      await prisma.zones.create({
        data: {
          zoneId: pullZone.id,
          name: appInfo.apId,
          hostname: appInfo.hostname,
          sourceDomain: appInfo.sourceDomain,
        },
      });
    }

    return formatJSONResponse({
      appInfo,
    });
  } catch (e) {
    return formatJSONResponse({
      status: 500,
      message: e,
    });
  }
};

View File

@ -0,0 +1,25 @@
import { handlerPath } from '@libs/handler-resolver';
// Serverless function definition for app verification:
// routes POST /verifyApp to handler.verifyApp.
export const verifyApp = {
  handler: `${handlerPath(__dirname)}/handler.verifyApp`,
  events: [
    {
      http: {
        method: 'post',
        path: 'verifyApp',
      },
    },
  ],
};

// Serverless function definition for submitting app info:
// routes POST /app to handler.submitAppInfo.
export const submitAppInfo = {
  handler: `${handlerPath(__dirname)}/handler.submitAppInfo`,
  events: [
    {
      http: {
        method: 'post',
        path: 'app',
      },
    },
  ],
};

View File

@ -1,9 +1,8 @@
import { APIGatewayProxyResult, APIGatewayEvent } from 'aws-lambda';
import { formatJSONResponse } from '@libs/api-gateway';
import { v4 } from 'uuid';
import { prisma } from '@libs/prisma';
import { account, nfaContract } from '@libs/nfa-contract';
import { contractInstance } from '@libs/nfa-contract';
export const submitBuildInfo = async (
event: APIGatewayEvent
@ -26,6 +25,7 @@ export const submitBuildInfo = async (
commitHash: data.commitHash,
ipfsHash: data.ipfsHash,
domain: data.domain,
verificationTransactionHash: 'Not verified.',
};
// Add build record to the database, if it's not already added
@ -39,6 +39,7 @@ export const submitBuildInfo = async (
});
if (buildRecord.length == 0) {
await prisma.builds.create({
data: {
githubRepository: buildInfo.githubRepository,
@ -60,18 +61,13 @@ export const submitBuildInfo = async (
});
if (mintRecord.length > 0) {
// Trigger verification
// Mark the token as verified in the contract
// call the `setTokenVerified` method
await nfaContract.methods
.setTokenVerified(mintRecord[0].tokenId, true)
.send({
from: account.address,
gas: '1000000',
})
.catch(console.error);
const transaction = await contractInstance.setTokenVerified(
mintRecord[0].tokenId,
true
);
buildInfo.verificationTransactionHash = transaction.hash;
// Update the database record in the tokens collection
await prisma.tokens.updateMany({
where: {

View File

@ -4,32 +4,61 @@ import {
///APIGatewayEventRequestContext,
} from 'aws-lambda';
import { formatJSONResponse } from '@libs/api-gateway';
import { v4 } from 'uuid';
import { initPrisma, prisma } from '@libs/prisma';
import { account, nfaContract, web3 } from '@libs/nfa-contract';
import { contractInstance, web3 } from '@libs/nfa-contract';
import { isTheSignatureValid } from '@libs/verify-signature';
import { ethers } from 'ethers';
export const submitMintInfo = async (
event: APIGatewayEvent
///context: APIGatewayEventRequestContext
): Promise<APIGatewayProxyResult> => {
try {
if (event.body === null) {
if (event.body === null || event.body === undefined) {
return formatJSONResponse({
status: 422,
message: 'Required parameters were not passed.',
});
}
// Check the alchemy signature and confirm the value of the ALCHEMY_SIGNING_KEY env variable.
// If both are valid, verify the authenticity of the request.
if (event.headers['x-alchemy-signature'] === undefined)
throw Error("Header field 'x-alchemy-signature' was not found.");
if (process.env.ALCHEMY_SIGNING_KEY === undefined)
throw Error('ALCHEMY_SIGNING_KEY env variable not found.');
else if (
!isTheSignatureValid(
event.body,
event.headers['x-alchemy-signature'],
process.env.ALCHEMY_SIGNING_KEY
)
) {
return formatJSONResponse({
status: 401,
message: 'Unauthorized',
});
}
const id = v4();
/**if (!verifyAlchemySig(event.headers.xalchemywork)) {
throw new Error('Invalid sig');
}**/
const eventBody = JSON.parse(event.body);
const topics = eventBody.event.data.block.logs[1].slice(1, 3);
const hexCalldata = eventBody.event.data.block.logs[1].data;
if (
eventBody.event.data.block.logs[1].topics[0] !=
ethers.utils.id(
'NewMint(uint256,string,string,string,string,string,string,string,string,uint24,bool,address,address,address)'
) // The first topic should be equal to the hash of the event name and its parameter types
) {
throw Error(
'The emitted event is not `NewMint`. This request is ignored.'
);
}
const topics = eventBody.event.data.block.logs[1].topics.slice(1, 4);
const hexCalldata = eventBody.event.data.block.logs[1].data;
const decodedLogs = web3.eth.abi.decodeLog(
[
{
@ -130,6 +159,7 @@ export const submitMintInfo = async (
owner: decodedLogs.owner,
ipfsHash: decodedLogs.ipfsHash,
domain: decodedLogs.externalURL,
verificationTransactionHash: 'Not verified',
};
initPrisma();
@ -150,13 +180,13 @@ export const submitMintInfo = async (
if (build.length > 0) {
// Mark the token as verified in the contract
try {
// what if the token has been burned?
// call the `setTokenVerified` method
await nfaContract.methods
.setTokenVerified(mintInfo.tokenId, true)
.send({
from: account.address,
gas: '1000000',
});
const transaction = await contractInstance.setTokenVerified(
mintInfo.tokenId,
true
);
mintInfo.verificationTransactionHash = transaction.hash;
verified = true;
} catch (error) {
// catch transaction error

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,244 @@
import axios, { AxiosRequestConfig } from 'axios';
type BunnyCdnErrorOptions = {
  name: string;
  message: string;
};

/**
 * Error thrown when the bunny.net API responds with an error payload:
 * the response's `ErrorKey` becomes `name` and `Message` becomes `message`.
 */
export class BunnyCdnError extends Error {
  public name: string;

  constructor({ name, message }: BunnyCdnErrorOptions) {
    // Error's own constructor already assigns `message`; the previous
    // re-assignment after super() was redundant. Only `name` needs setting.
    super(message);
    this.name = name;
  }
}
const BUNNY_CDN_API_URL = 'https://api.bunny.net';
export class BunnyCdn {
constructor(private accessKey: string) {}
private enforceHttps = (url: string) => {
if (url.startsWith('https://')) {
return url;
}
if (url.startsWith('http://')) {
return url.replace('http://', 'https://');
}
return `https://${url}`;
};
private fetchBunny = async (endpoint: string, init: AxiosRequestConfig) => {
const headers = {
AccessKey: this.accessKey,
};
const response = await axios.request({
url: `${BUNNY_CDN_API_URL}${endpoint}`,
method: 'POST',
headers,
validateStatus: (status) => status < 500,
timeout: 20_000,
...init,
});
const data = response.data;
if (data.ErrorKey || data.Message) {
throw new BunnyCdnError({ name: data.ErrorKey, message: data.Message });
}
return data;
};
public async getPullZone(options: GetPullZoneMethodArgs) {
const data = (await this.fetchBunny(`/pullzone/${options.pullZoneId}`, {
method: 'GET',
})) as PullZoneData;
return {
id: data.Id,
name: data.Name,
originUrl: data.OriginUrl,
hostnames: data.Hostnames,
};
}
public async createPullZone(options: CreatePullZoneMethodArgs) {
const httpsOriginUrl = this.enforceHttps(options.originUrl);
const data = (await this.fetchBunny(`/pullzone`, {
data: {
Name: options.zoneId,
Type: 0,
OriginUrl: httpsOriginUrl,
UseStaleWhileOffline: true,
},
})) as PullZoneData;
const systemHostname: HostnameInterface[] = data.Hostnames.filter(
(hostname) => hostname.IsSystemHostname === true
);
return {
id: data.Id,
name: data.Name,
originUrl: data.OriginUrl,
hostname: systemHostname[0].Value,
};
}
public async updatePullZone(options: UpdatePullZoneMethodArgs) {
const httpsOriginUrl = this.enforceHttps(options.originUrl);
await this.fetchBunny(`/pullzone/${options.pullZoneId}`, {
data: {
OriginUrl: httpsOriginUrl,
},
});
return true;
}
public async deletePullZone(options: DeletePullZoneMethodArgs) {
await this.fetchBunny(`/pullzone/${options.pullZoneId}`, {
method: 'DELETE',
});
return true;
}
public async addCustomHostname(options: AddCustomHostnameMethodArgs) {
await this.fetchBunny(`/pullzone/${options.pullZoneId}/addHostname`, {
data: {
Hostname: options.hostname,
},
});
return true;
}
public async removeCustomHostname(options: AddCustomHostnameMethodArgs) {
await this.fetchBunny(`/pullzone/${options.pullZoneId}/removeHostname`, {
method: 'DELETE',
data: {
Hostname: options.hostname,
},
});
return true;
}
public async loadFreeCertificate(options: LoadFreeCertificateMethodArgs) {
await this.fetchBunny(
`/pullzone/loadFreeCertificate?hostname=${options.hostname}`,
{
method: 'GET',
}
);
return true;
}
public async setForceSSL(options: SetForceSSLMethodArgs) {
await this.fetchBunny(`/pullzone/${options.pullZoneId}/setForceSSL`, {
data: {
Hostname: options.hostname,
ForceSSL: options.shouldForceSSL ?? true,
},
});
return true;
}
public async purgePullZoneCache(options: PurgePullZoneCacheMethodArgs) {
await this.fetchBunny(`/pullzone/${options.pullZoneId}/purgeCache`, {});
return true;
}
}
/** Error payload shape returned by the bunny.net API. */
export type ErrorData = {
  ErrorKey: string;
  Field: string;
  Message: string;
};

/** Arguments for BunnyCdn.getPullZone. */
export type GetPullZoneMethodArgs = {
  pullZoneId: string;
};

/** Arguments for BunnyCdn.createPullZone. */
export type CreatePullZoneMethodArgs = {
  zoneId: string;
  originUrl: string;
};

/** Arguments for BunnyCdn.updatePullZone. */
export type UpdatePullZoneMethodArgs = {
  pullZoneId: string;
  originUrl: string;
};

/** Arguments for BunnyCdn.deletePullZone. */
export type DeletePullZoneMethodArgs = {
  pullZoneId: string;
};

/** Arguments for BunnyCdn.addCustomHostname. */
export type AddCustomHostnameMethodArgs = {
  pullZoneId: string;
  hostname: string;
};

/** Arguments for BunnyCdn.loadFreeCertificate. */
export type LoadFreeCertificateMethodArgs = {
  hostname: string;
};

/** Arguments for removing a custom hostname from a pull zone. */
export type RemoveCustomHostnameMethodArgs = {
  pullZoneId: string;
  hostname: string;
};

/** Arguments for BunnyCdn.setForceSSL; ForceSSL defaults to true. */
export type SetForceSSLMethodArgs = {
  pullZoneId: string;
  hostname: string;
  shouldForceSSL?: boolean;
};

/** Arguments for BunnyCdn.purgePullZoneCache. */
export type PurgePullZoneCacheMethodArgs = {
  pullZoneId: string;
};

/** Hostname entry as returned inside a bunny.net pull-zone response. */
type HostnameInterface = {
  /** The unique ID of the hostname */
  Id: number;
  /** The hostname value for the domain name */
  Value: string;
  /**
   * Determines if the Force SSL feature is enabled.
   * NOTE(review): declared as string here though it reads like a flag —
   * confirm against the actual API response before changing.
   */
  ForceSSL: string;
  /** Determines if this is a system hostname controlled by bunny.net */
  IsSystemHostname: boolean;
  /**
   * Determines if the hostname has an SSL certificate configured.
   * Fixed: was the literal type `true`, which (per this very comment)
   * should be a plain boolean; widening is safe for response data.
   */
  HasCertificate: boolean;
};

/** Subset of the bunny.net pull-zone response consumed by this client. */
type PullZoneData = {
  Id: number;
  Name: string;
  OriginUrl: string;
  Hostnames: HostnameInterface[];
};
export type FetchPullZoneArgs = {
  name: string;
};

/**
 * Probes `https://<name>.b-cdn.net` with a HEAD request. When the response
 * carries a `cdn-pullzone` header, resolves the hostname plus the zone id
 * taken from that header; otherwise resolves undefined.
 */
export const fetchPullZone = async ({ name }: FetchPullZoneArgs) => {
  const hostname = `https://${name}.b-cdn.net`;
  const { headers } = await axios.head(hostname, { timeout: 20_000 });
  const zoneId = headers['cdn-pullzone'];

  return zoneId ? { hostname, id: zoneId } : undefined;
};

View File

@ -1,16 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires
const Web3 = require('web3'); // `const` instead of legacy `var`

// Uses the injected provider when present, falling back to a local
// websocket node.
const web3 = new Web3(Web3.givenProvider || 'ws://localhost:17895');

/**
 * Decodes a raw EVM log entry against the given event ABI fragment.
 *
 * @param eventFieldsABI - ABI descriptions of the event's parameters.
 * @param data - Hex-encoded, non-indexed log data.
 * @param topics - Log topics; assumes the layout web3's decodeLog expects
 *   (for non-anonymous events, without the signature topic) — TODO confirm.
 * @returns The decoded log fields keyed by parameter name.
 */
export const logDecoder = (
  eventFieldsABI: {
    indexed: boolean;
    internalType: string;
    name: string;
    type: string;
  }[],
  data: string,
  topics: string[]
) => {
  return web3.eth.abi.decodeLog(eventFieldsABI, data, topics);
};

View File

@ -1,18 +1,27 @@
import { Contract, Wallet, ethers } from 'ethers';
import * as abiFile from '@libs/FleekERC721.json';
import Web3 from 'web3';
import * as abiFile from '../../../contracts/deployments/goerli/FleekERC721.json';
import * as dotenv from 'dotenv';
dotenv.config();
if (process.env.PRIVATE_KEY === undefined) {
throw Error('Private key environment variable not set.');
throw Error('Private Key environment variable not set.');
}
const contract_address = abiFile.address;
export const abi = abiFile.abi as any;
if (process.env.JSON_RPC === undefined) {
throw Error('JSON RPC environment variable not set.');
}
export const web3 = new Web3('https://rpc.goerli.mudit.blog');
export const nfaContract = new web3.eth.Contract(abi, contract_address);
export const account = web3.eth.accounts.privateKeyToAccount(
process.env.PRIVATE_KEY
);
if (process.env.CONTRACT_ADDRESS === undefined) {
throw Error('Contract Address environment variable not set.');
}
export const web3 = new Web3(process.env.JSON_RPC);
const CONTRACT_ADDRESS = process.env.CONTRACT_ADDRESS;
// RPC loaded from env file previously
const provider = new ethers.providers.JsonRpcProvider(process.env.JSON_RPC);
// account key loaded from env file previously
export const signer = new Wallet(process.env.PRIVATE_KEY, provider);
export const contractInstance = new Contract(
CONTRACT_ADDRESS,
abiFile.abi,
signer
);

View File

@ -1,6 +1,6 @@
import { PrismaClient } from '@prisma/client';
export const prisma = new PrismaClient();
export const prisma = new PrismaClient({log: ['warn', 'error']});
export async function initPrisma() {
// Connect the client

View File

@ -0,0 +1,12 @@
import * as crypto from 'crypto';
/**
 * Validates a webhook signature: recomputes the HMAC-SHA256 of the raw
 * request body with the shared signing key and compares it against the
 * hex signature supplied by the caller.
 *
 * @param body - Raw string body (must NOT be a re-serialized JSON object,
 *   or the digest will not match what the sender signed).
 * @param signature - Hex digest taken from the request's signature header.
 * @param signingKey - Shared secret used to sign the payload.
 * @returns true when the signature matches, false otherwise.
 */
export function isTheSignatureValid(
  body: string, // must be raw string body, not json transformed version of the body
  signature: string, // the signature value taken from the request header
  signingKey: string // signing secret key for front-end
): boolean {
  const hmac = crypto.createHmac('sha256', signingKey); // HMAC-SHA256 with the signing key
  hmac.update(body, 'utf8');
  const digest = hmac.digest('hex');
  // Constant-time comparison instead of `===` so an attacker cannot
  // recover the digest byte-by-byte via timing differences. Lengths must
  // match first because timingSafeEqual throws on unequal buffer sizes.
  if (signature.length !== digest.length) {
    return false;
  }
  return crypto.timingSafeEqual(Buffer.from(signature), Buffer.from(digest));
}

File diff suppressed because it is too large Load Diff

View File

@ -3,11 +3,11 @@ schema:
dataSources:
- kind: ethereum
name: FleekNFA
network: goerli
network: mainnet # Works with the Anvil QA network also
source:
address: "0x8795608346Eb475E42e69F1281008AEAa522479D" # <- Proxy Contract
address: "0x1CfD8455F189c56a4FBd81EB7D4118DB04616BA8" # <- Proxy Contract
abi: FleekNFA
startBlock: 8671990
# startBlock: 8671990
mapping:
kind: ethereum/events
apiVersion: 0.0.7
@ -32,7 +32,7 @@ dataSources:
- ChangeAccessPointAutoApproval
abis:
- name: FleekNFA
file: ../contracts/artifacts/contracts/FleekERC721.sol/FleekERC721.json
file: ../contracts/deployments/qanet/FleekERC721.json
eventHandlers:
- event: Approval(indexed address,indexed address,indexed uint256)
handler: handleApproval

View File

@ -1,33 +0,0 @@
import { useState } from 'react';
import { createContext } from './utils';
// Shape of the app-wide context: the page background color plus its setter.
export type AppContext = {
  backgroundColor: string;
  setBackgroundColor: (color: string) => void;
};

// Typed context pair built by the project's createContext helper; the
// name/hookName/providerName strings are presumably used in its
// out-of-provider error messages — confirm in './utils'.
const [AppProvider, useContext] = createContext<AppContext>({
  name: 'App.Context',
  hookName: 'App.useContext',
  providerName: 'App.Provider',
});

// Abstract class used purely as a namespace for the context hook and
// provider; it is never instantiated.
export abstract class App {
  static readonly useContext = useContext;

  // Provider owning the background-color state; starts as an empty string.
  static readonly Provider: React.FC<App.AppProps> = ({ children }) => {
    const [backgroundColor, setBackgroundColor] = useState('');

    return (
      <AppProvider value={{ backgroundColor, setBackgroundColor }}>
        {children}
      </AppProvider>
    );
  };
}

// Declaration-merged namespace so prop types read as App.AppProps.
export namespace App {
  export type AppProps = {
    children: React.ReactNode;
  };
}

View File

@ -2,7 +2,6 @@ import { HashRouter, Navigate, Route, Routes } from 'react-router-dom';
import { themeGlobals } from '@/theme/globals';
import { App as AppContext } from './app.context';
import { AppPage, ToastProvider } from './components';
import {
ComponentsTest,
@ -18,19 +17,17 @@ export const App: React.FC = () => {
<>
<HashRouter>
<ToastProvider />
<AppContext.Provider>
<AppPage>
<Routes>
<Route path="/" element={<ExploreView />} />
<Route path="/mint" element={<Mint />} />
<Route path="/create-ap/:id" element={<CreateAP />} />
<Route path="/nfa/:id" element={<IndexedNFAView />} />
{/** TODO remove for release */}
<Route path="/components-test" element={<ComponentsTest />} />
<Route path="*" element={<Navigate to="/" />} />
</Routes>
</AppPage>
</AppContext.Provider>
<AppPage>
<Routes>
<Route path="/" element={<ExploreView />} />
<Route path="/mint" element={<Mint />} />
<Route path="/create-ap/:id" element={<CreateAP />} />
<Route path="/nfa/:id" element={<IndexedNFAView />} />
{/** TODO remove for release */}
<Route path="/components-test" element={<ComponentsTest />} />
<Route path="*" element={<Navigate to="/" />} />
</Routes>
</AppPage>
</HashRouter>
</>
);

View File

@ -1,6 +1,8 @@
import { App } from '@/app.context';
import React from 'react';
import { NavBar } from '@/components';
import { GradientOverlay } from './gradient-overlay';
import { PageStyles as PS } from './page.styles';
export type AppPageProps = {
@ -8,18 +10,12 @@ export type AppPageProps = {
};
export const AppPage: React.FC<AppPageProps> = ({ children }: AppPageProps) => {
const { backgroundColor } = App.useContext();
const background = `linear-gradient(180deg, #${backgroundColor}59 0%, #000000 30%)`;
return (
<PS.Container
css={{
background: background,
}}
>
<>
<GradientOverlay />
<NavBar />
<PS.Content as="main">{children}</PS.Content>
</PS.Container>
<PS.Content>{children}</PS.Content>
</>
);
};

View File

@ -0,0 +1,17 @@
import { useAppStore } from '@/store';
import { PageStyles as PS } from './page.styles';
/**
 * Page-wide gradient overlay tinted with the store's overlay color.
 * Renders nothing while no color is set.
 */
export const GradientOverlay: React.FC = () => {
  const { overlayColor } = useAppStore();

  return overlayColor ? (
    <PS.GradientOverlay
      css={{
        background: `linear-gradient(180deg, #${overlayColor}59 0%, transparent 30%)`,
      }}
    />
  ) : null;
};

View File

@ -1,12 +1,16 @@
import { styled } from '@/theme';
export abstract class PageStyles {
public static readonly Container = styled('div', {
minHeight: '100vh',
position: 'relative',
});
export const PageStyles = {
GradientOverlay: styled('div', {
position: 'absolute',
inset: 0,
pointerEvents: 'none',
}),
public static readonly Content = styled('div', {
Content: styled('main', {
position: 'relative',
display: 'flex',
flexDirection: 'column',
width: '100%',
minHeight: '85vh',
maxWidth: '$6xl',
@ -16,5 +20,5 @@ export abstract class PageStyles {
'@md': {
padding: '0 $6',
},
});
}
}),
};

File diff suppressed because one or more lines are too long

View File

@ -179,6 +179,7 @@ export namespace ArgumentsMaps {
string, // string ENS
string, // string commitHash
string, // string gitRepository
string, // string ipfsHash
string, // string logo
number, // uint24 color
boolean, // bool accessPointAutoApproval

View File

@ -39,6 +39,7 @@ export const FleekERC721 = {
params.description.replaceAll(/\n/g, '\\n'), //replace break lines with \\n so it doesn't break the json,
params.image,
params.externalUrl,
params.ipfsHash,
params.ens,
params.commitHash,
params.repo
@ -155,6 +156,7 @@ export namespace FleekERC721 {
description: string;
owner: string;
externalUrl: string;
ipfsHash: string;
image: string;
ens?: string;
commitHash: string;

View File

@ -3,12 +3,12 @@ import {
getDefaultClient,
} from 'connectkit';
import { createClient, WagmiConfig } from 'wagmi';
import { goerli } from 'wagmi/chains';
import { goerli, polygonMumbai } from 'wagmi/chains';
import { env } from '@/constants';
const alchemyId = env.alchemy.id;
const chains = [goerli];
const chains = [polygonMumbai];
const wagmiClient = createClient(
getDefaultClient({

View File

@ -0,0 +1,33 @@
import { createSlice, PayloadAction } from '@reduxjs/toolkit';
import { RootState } from '@/store';
import { useAppSelector } from '@/store/hooks';
// Global UI state; currently only the gradient-overlay color used by the
// page layout (consumed as `#${overlayColor}` — hex digits, no '#').
export interface AppState {
  overlayColor?: string;
}

// No overlay is shown until a color is set.
const initialState: AppState = {
  overlayColor: undefined,
};

export const appSlice = createSlice({
  name: 'AppSlice',
  initialState,
  reducers: {
    // Sets the overlay color for the current page.
    setOverlayColor: (state, action: PayloadAction<string>) => {
      state.overlayColor = action.payload;
    },
    // Clears the color, hiding the overlay again.
    clearOverlayColor: (state) => {
      state.overlayColor = undefined;
    },
  },
});

export const appActions = appSlice.actions;

// Selector plus convenience hook for reading this slice from the store.
const selectAppState = (state: RootState): AppState => state.app;

export const useAppStore = (): AppState => useAppSelector(selectAppState);

export default appSlice.reducer;

View File

@ -0,0 +1 @@
export * from './app-slice';

View File

@ -43,9 +43,9 @@ export const bunnyCDNActions = {
...asyncThunk,
};
const selectENSState = (state: RootState): BunnyCDNState => state.bunnyCDN;
const selectBunnyCDNState = (state: RootState): BunnyCDNState => state.bunnyCDN;
export const useBunnyCDNStore = (): BunnyCDNState =>
useAppSelector(selectENSState);
useAppSelector(selectBunnyCDNState);
export default bunnyCDNSlice.reducer;

View File

@ -3,3 +3,4 @@ export * from './github';
export * from './toasts';
export * from './ens';
export * from './bunny-cdn';
export * from './app';

View File

@ -1,5 +1,6 @@
import { configureStore } from '@reduxjs/toolkit';
import appReducer from './features/app/app-slice';
import bunnyCDNReducer from './features/bunny-cdn/bunny-cdn-slice';
import ENSReducer from './features/ens/ens-slice';
import fleekERC721Reducer from './features/fleek-erc721/fleek-erc721-slice';
@ -8,6 +9,7 @@ import toastsReducer from './features/toasts/toasts-slice';
export const store = configureStore({
reducer: {
app: appReducer,
bunnyCDN: bunnyCDNReducer,
ENS: ENSReducer,
fleekERC721: fleekERC721Reducer,

View File

@ -3,14 +3,8 @@ import { styled } from '@/theme';
export const CreateApStyles = {
Container: styled(Flex, {
height: '100%',
flexDirection: 'column',
minHeight: '85vh',
alignItems: 'flex-start',
'@md': {
alignItems: 'center',
justifyContent: 'center',
},
flex: 1,
alignItems: 'center',
justifyContent: 'center',
}),
};

View File

@ -20,7 +20,7 @@ const ItemsDropdown: ItemDropdown[] = [
export const ComboboxTest: React.FC = () => {
const selected = useState<Item>();
const selectedDropdown = useState<ItemDropdown>(ItemsDropdown[0]);
const selectedDropdown = useState<ItemDropdown>();
return (
<Flex

View File

@ -1,8 +1,8 @@
import { useEffect, useRef, useState } from 'react';
import { Link } from 'react-router-dom';
import { App } from '@/app.context';
import { Button } from '@/components';
import { useAppStore } from '@/store';
import { parseNumberToHexColor } from '@/utils/color';
import { IndexedNFA } from '../../indexed-nfa.context';
@ -18,8 +18,8 @@ export const IndexedNFAAsideFragment: React.FC = () => {
const [top, setTop] = useState<number>();
const { nfa } = IndexedNFA.useContext();
const { backgroundColor } = App.useContext();
const background = `radial-gradient(closest-corner circle at 90% 45%, #${backgroundColor}8c 1% ,#${backgroundColor}57 20%, transparent 40%), radial-gradient(closest-corner circle at 60% 25%, #${backgroundColor} 3%, #${backgroundColor}73 30%, #181818 70%)`;
const { overlayColor } = useAppStore();
const background = `radial-gradient(closest-corner circle at 90% 45%, #${overlayColor}8c 1% ,#${overlayColor}57 20%, transparent 40%), radial-gradient(closest-corner circle at 60% 25%, #${overlayColor} 3%, #${overlayColor}73 30%, #181818 70%)`;
useEffect(() => {
setTop(ref.current?.getBoundingClientRect().top);

View File

@ -3,8 +3,8 @@ import { ethers } from 'ethers';
import { useEffect } from 'react';
import { useNavigate, useParams } from 'react-router-dom';
import { App } from '@/app.context';
import { getNFADetailDocument } from '@/graphclient';
import { appActions, useAppDispatch } from '@/store';
import { AppLog } from '@/utils';
import { parseNumberToHexColor } from '@/utils/color';
@ -18,14 +18,14 @@ import { IndexedNFAStyles as S } from './indexed-nfa.styles';
export const IndexedNFAView: React.FC = () => {
const { id } = useParams<{ id: string }>();
const { setBackgroundColor } = App.useContext();
const dispatch = useAppDispatch();
const navigate = useNavigate();
useEffect(() => {
return () => {
setBackgroundColor('000000');
dispatch(appActions.clearOverlayColor());
};
}, [setBackgroundColor]);
}, [dispatch]);
const handleError = (error: unknown): void => {
AppLog.errorToast(
@ -43,7 +43,9 @@ export const IndexedNFAView: React.FC = () => {
onCompleted(data) {
if (!data.token) handleError(new Error('Token not found'));
if (data.token?.color)
setBackgroundColor(parseNumberToHexColor(data.token.color));
dispatch(
appActions.setOverlayColor(parseNumberToHexColor(data.token.color))
);
},
onError(error) {
handleError(error);

View File

@ -3,13 +3,8 @@ import { styled } from '@/theme';
export const MintStyles = {
Container: styled(Flex, {
height: '100%',
flex: 1,
alignItems: 'center',
justifyContent: 'center',
minHeight: '85vh',
alignItems: 'flex-start',
'@md': {
alignItems: 'center',
},
}),
};

View File

@ -2,3 +2,4 @@ export * from './logo-field';
export * from './app-name-field';
export * from './app-description-field';
export * from './ens-domain-field';
export * from './ipfs-hash-field';

View File

@ -0,0 +1,17 @@
import { Form } from '@/components';
import { useMintFormContext } from '../mint-form.context';
export const IPFSHashField: React.FC = () => {
const {
form: { ipfsHash },
} = useMintFormContext();
return (
<Form.Field context={ipfsHash}>
<Form.Label>IPFS Hash</Form.Label>
<Form.Input placeholder="Your IPFS hash" />
<Form.Overline />
</Form.Field>
);
};

View File

@ -13,6 +13,7 @@ export type MintFormContext = {
logoColor: FormField;
ens: FormField;
domainURL: FormField;
ipfsHash: FormField;
verifier: FormField;
isValid: ReactState<boolean>;
};
@ -51,6 +52,7 @@ export const useMintFormContextInit = (): MintFormContext => ({
StringValidators.required,
StringValidators.isUrl,
]),
ipfsHash: useFormField('ipfsHash', [StringValidators.required]),
ens: useFormField('ens', [], ''),
verifier: useFormField('verifier', [StringValidators.required]),
isValid: useState(false),

View File

@ -16,6 +16,7 @@ import {
AppDescriptionField,
AppNameField,
EnsDomainField,
IPFSHashField,
LogoField,
} from './fields';
import { useMintFormContext } from './mint-form.context';
@ -38,12 +39,18 @@ export const MintFormStep: React.FC = () => {
domainURL: {
value: [domainURL],
},
ipfsHash: {
value: [ipfsHash],
},
gitCommit: {
value: [gitCommit],
},
gitBranch: {
value: [gitBranch],
},
verifier: {
value: [verifier],
},
logoColor: {
value: [logoColor],
},
@ -69,10 +76,11 @@ export const MintFormStep: React.FC = () => {
ens,
gitCommit,
`${repositoryName?.url}/tree/${gitBranch}`,
ipfsHash,
appLogo,
parseColorToNumber(logoColor),
verifyNFA,
'0xdBb04e00D5ec8C9e3aeF811D315Ee7C147c5DBFD', //TODO remove hardcode
verifier,
{ value: billing },
]);
@ -99,6 +107,7 @@ export const MintFormStep: React.FC = () => {
<Flex css={{ gap: '$4', flexDirection: 'column' }}>
<AppNameField />
<AppDescriptionField />
<IPFSHashField />
<EnsDomainField />
<LogoField />
</Flex>

View File

@ -9,6 +9,7 @@ import { useMintFormContext } from '../form-step';
// TODO: remove mocked items after graphql api is fixed
const mockedItems = [
'0xd4997d0facc83231b9f26a8b2155b4869e99946f',
'0xdBb04e00D5ec8C9e3aeF811D315Ee7C147c5DBFD',
'0x7ED735b7095C05d78dF169F991f2b7f1A1F1A049',
];