diff --git a/_config.yml b/_config.yml index 1501fcdf7abaf2f40828bba3319fad81afd40778..a9592a131d0be407b5e84d2d0a680ed2c31626f1 100644 --- a/_config.yml +++ b/_config.yml @@ -24,24 +24,24 @@ collections: id: quickstart position: 3 output: true - nodeop: - title: Node Operation - id: nodeop - position: 4 - output: true javascript: title: Javascript id: javascript - position: 5 + position: 4 output: true tutorials: title: Tutorials id: tutorials - position: 6 + position: 5 output: true services: title: Services id: services + position: 6 + output: true + nodeop: + title: Node Operation + id: nodeop position: 7 output: true resources: diff --git a/_data/apidefinitions/broadcast_ops.yml b/_data/apidefinitions/broadcast_ops.yml index 0b89495b739f1295f4b93e1403138385d540a97b..68b86c08a0b4be17e8006add0100b77bde216294 100644 --- a/_data/apidefinitions/broadcast_ops.yml +++ b/_data/apidefinitions/broadcast_ops.yml @@ -74,7 +74,7 @@ - '["vote", {"voter":"hiveio","author":"alice","permlink":"a-post-by-alice","weight":10000}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestvote)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#vote)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#vote)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#vote)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.vote)' @@ -166,7 +166,7 @@ - '["comment",{"parent_author":"","parent_permlink":"hiveio","author":"alice","permlink":"a-post-by-alice","title":"A Post By Alice","body":"This is my post.","json_metadata":"{\"tags\":[\"hiveio\",\"example\",\"tags\"]}"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestpost)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#comment)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#comment)' - 
'[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#comment)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.comment)' @@ -194,7 +194,7 @@ - '["transfer",{"from":"hiveio","to":"alice","amount":{"amount":"10","precision":3,"nai":"@@000000021"},"memo":"Thanks for all the fish."}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requesttransfer)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#transfer)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#transfer)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#transfer)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.transfer)' @@ -681,7 +681,7 @@ - '["custom_json",{"required_auths":["alice"],"required_posting_auths":[],"id":"witness","json":"[\"enable_content_editing\", {\"account\": \"alice\", \"relock_time\": \"2100-01-01T12:00:00\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -931,7 +931,7 @@ witness according to stake. 
See: [#169]({{ 'https://github.com/steemit/steem/issues/169' | archived_url }}), [PY: Account Recovery]({{ '/tutorials-python/account_recovery.html' | relative_url }}) - roles: + roles: - owner params: - account_to_recover diff --git a/_data/apidefinitions/broadcast_ops_communities.yml b/_data/apidefinitions/broadcast_ops_communities.yml index 783ec1c9f26d3598bbe32c53714e6b688c1d42aa..843ada6f3232ad354d268a692937d89538db1dbc 100644 --- a/_data/apidefinitions/broadcast_ops_communities.yml +++ b/_data/apidefinitions/broadcast_ops_communities.yml @@ -27,7 +27,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"setRole\", {\"community\": \"hive-123456\", \"account\": \"edward\", \"role\": \"member\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -44,7 +44,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"setUserTitle\", {\"community\": \"hive-123456\", \"account\": \"alice\", \"title\": \"Founder\"}]"}]' client_docs: - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' - name: mutePost @@ -56,7 +56,7 @@ - 
'["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"mutePost\",{\"community\":\"hive-123456\",\"account\":\"eve\",\"permlink\":\"re-eve-comment-1564339652z\",\"notes\":\"Off Topic\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -66,7 +66,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"unmutePost\",{\"community\":\"hive-123456\",\"account\":\"eve\",\"permlink\":\"re-eve-comment-1564339652z\",\"notes\":\"On Topic (on second thought)\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -75,7 +75,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"updateProps\",{\"community\":\"hive-123456\",\"props\":{\"title\":\"Anti-Knitting\",\"about\":\"A community against knitting.\",\"is_nsfw\":false,\"description\":\"If you like to knitting, go away.\",\"flag_text\":\"Must hate knitting or else you will be muted.\"}}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - 
'[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -85,7 +85,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"subscribe\",{\"community\":\"hive-123456\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -95,7 +95,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"pinPost\",{\"community\":\"hive-123456\",\"account\":\"alice\",\"permlink\":\"a-post-by-alice\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -105,7 +105,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"unsubscribe\",{\"community\":\"hive-123456\"}]"}]' client_docs: - 
'[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -115,7 +115,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"unpinPost\",{\"community\":\"hive-123456\",\"account\":\"alice\",\"permlink\":\"a-post-by-alice\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' @@ -125,7 +125,7 @@ - '["custom_json",{"required_auths":[],"required_posting_auths":["alice"],"id":"community","json":"[\"flagPost\",{\"community\":\"hive-123456\",\"account\":\"eve\",\"permlink\":\"a-post-by-eve\",\"notes\":\"Warning\"}]"}]' client_docs: - '[hive-keychain](https://github.com/stoodkev/hive-keychain#requestcustomjson)' - - '[hivesigner.js](https://github.com/ledgerconnect/hivesigner.js#custom-json)' + - '[hivesigner.js](https://github.com/ecency/hivesigner-sdk#custom-json)' - '[hive-js](https://gitlab.syncad.com/hive/hive-js/tree/master/doc#custom-json)' - '[beem](https://beem.readthedocs.io/en/latest/beem.transactionbuilder.html)' - '[hive-ruby](https://www.rubydoc.info/gems/hive-ruby/Hive/Broadcast.custom_json)' diff --git a/_data/nav.yml b/_data/nav.yml index 
62b1def9fb2cbad224b4ca302a25fe6d449ee0de..743792aec4e60a9a1cda33077747cbe9c17593cc 100644 --- a/_data/nav.yml +++ b/_data/nav.yml @@ -8,16 +8,10 @@ toc: url: "/quickstart/" position: 2 exclude: true - - title: "Node Operation" - collection: "nodeop" - url: "/nodeop/" - position: 3 - exclude: true - top_level_section: true - title: "Tutorials" collection: "tutorials" url: "/tutorials/" - position: 4 + position: 3 exclude: true docs: - title: Javascript @@ -33,20 +27,26 @@ toc: - title: "Services" collection: "services" url: "/services/" - position: 6 + position: 4 exclude: true - title: "Appbase API" - collection: apidefinitions + collection: apidefinitions url: "/apidefinitions/" exclude: true - position: 7 + position: 5 + - title: "Node Operation" + collection: "nodeop" + url: "/nodeop/" + position: 6 + exclude: true + top_level_section: true - title: "Resources" collection: "resources" url: "/resources/" exclude: true - position: 8 + position: 7 - title: "Glossary" collection: "glossary" url: "/glossary/" - position: 9 + position: 8 exclude: true diff --git a/_introduction/web3.md b/_introduction/web3.md new file mode 100644 index 0000000000000000000000000000000000000000..e603bc928f590ff385ae8c6633f6e3ba147e14a2 --- /dev/null +++ b/_introduction/web3.md @@ -0,0 +1,23 @@ +--- +title: Web2 vs Web3 +position: 2 +--- + +The Internet we know today mostly contains websites and services referred to as Web2, dominated by companies that +provide services in exchange for your personal data. Blockchains such as Hive power websites and services that fall +into the Web3 category, referring to decentralized apps which run on the blockchain. +These are apps that allow anyone to participate, with true ownership, without monetising their personal data.
+ +| Aspect | Web2 | Web3 | +|------------------|--------------------------------------------|---------------------------------------------------| +| Data Ownership | Controlled by centralized entities | Decentralized, users own their data | +| Trust | Relies on trust in centralized entities | Trustless system through blockchain technology | +| Censorship | Vulnerable to censorship by centralized authorities | Resistant to censorship due to decentralization | +| Scalability | Limited scalability due to centralized infrastructure | Potential for greater scalability through decentralization | +| Transparency | Lack of transparency in data handling | Transparent, immutable record on the blockchain | +| Governance | Controlled by centralized entities | Community-driven governance through DAOs | +| Monetization | Centralized monetization models (e.g., advertising) | Decentralized monetization via tokens and smart contracts | +| Intermediaries | Relies heavily on intermediaries (e.g., social media platforms) | Removes intermediaries through decentralization | +| Innovation | Innovation driven by large tech companies | Innovation driven by decentralized protocols and communities | + +**Start developing and investing your time building products on Web3 to empower communities, now!** diff --git a/_introduction/welcome.md b/_introduction/welcome.md index 019339772eb8c5d3068fb53b706ca6936b93e30b..e9d518f4f30402508113bd948b43d9d9f83bb8f3 100644 --- a/_introduction/welcome.md +++ b/_introduction/welcome.md @@ -1,29 +1,13 @@ --- -title: Welcome to Hive +title: Intro to Hive position: 1 --- -#### Hive Developer Portal +**[Hive](https://hive.io){:target="_blank"}** is a decentralized blockchain with fast transaction speed (3s) without fees, human +readable account names, decentralized community fund and algorithmic stablecoin (HBD) backed by HIVE. 
This site is designed to aid those interested in developing applications on the **[Hive](https://hive.io){:target="_blank"}** blockchain. -If you don't know about Hive yet, visit [hive.io](https://hive.io). - The code for this site is on [gitlab](https://gitlab.syncad.com/hive/devportal). If you'd like to see a specific improvement you can either submit a pull request or file an issue. Please follow our [contribution guidelines](https://gitlab.syncad.com/hive/devportal/-/blob/develop/CONTRIBUTING.md) - - - -#### Site Resources - -The **quickstart** section has some general information on working with nodes, and testnets. - -Our **tutorials** section will take you through the basics of interacting with Hive apis in Javascript, Python, and Ruby. - -More detailed API specs are available in the **APPBASE API** section. Those apis are under development, so please read -notes at the top of each section. - -You can find community resources, libraries, and more interactive ways to get help and education in the **Resources->Communities** section. - -The **glossary** has Hive specific terms that will help you understand documentation, whitepapers, and the speech of other hivers. diff --git a/_introduction/workflow.md b/_introduction/workflow.md new file mode 100644 index 0000000000000000000000000000000000000000..ca4f7a6919d43b115a485638ec6c882ebf59f5b8 --- /dev/null +++ b/_introduction/workflow.md @@ -0,0 +1,10 @@ +--- +title: Developer workflow +position: 3 +--- + +This is a high-level representation of how websites or services talk with the blockchain. It's important to note that +this is an oversimplification of the Hive network for the purpose of making it easier to understand and learn.
+ +<img src="{{ '/images/hive-dev-structure.png' | relative_url }}" width="80%" alt="Hive devs workflow" /> + diff --git a/_quickstart/authentication.md b/_quickstart/authentication.md new file mode 100644 index 0000000000000000000000000000000000000000..68b56e33cd76b09568f317a4d4b6fccdd9693415 --- /dev/null +++ b/_quickstart/authentication.md @@ -0,0 +1,71 @@ +--- +title: Authentication +position: 3 +exclude: true +--- +#### User authentication + +In Web3, unlike Web2, authenticating a user has a different meaning. Since only the user holds and knows their private keys, there must +be a secure way to sign transactions: there is no traditional concept of login, and applications won't have direct access to the user's private keys. +The Web3 way of authentication, or login, means the user has to sign an arbitrary message to verify ownership, and wallet applications facilitate that. +In Hive, there are services maintained and developed by the community. These services reduce the amount of trust users must place in new dapps and services. +They help to minimize hacks, private key theft, and phishing attacks by malicious actors. It is recommended to +utilize and integrate these services into your website or apps so users can quickly authenticate and start using your app +without fear of losing their private keys. + +#### HiveSigner + +This is an OAuth2 standard built on top of the Hive blockchain. Just like a Web2 OAuth2 integration, Hivesigner integration works in a similar way. + +**Application side** +1. Create a Hive account for the application/website [https://signup.hive.io](https://signup.hive.io). +2. Log in to Hivesigner with that account and set the account as **Application** from [https://hivesigner.com/profile](https://hivesigner.com/profile). +3. Authorize **hivesigner** with the Application account by clicking this link [https://hivesigner.com/authorize/hivesigner](https://hivesigner.com/authorize/hivesigner). +4.
Finalize app integration [https://docs.hivesigner.com/h/guides/get-started/hivesigner-oauth2](https://docs.hivesigner.com/h/guides/get-started/hivesigner-oauth2). + +**User side** + +Overview of the steps a user experiences during Login/Authentication in your website or app. + +1. The website or application forwards the user to Hivesigner.com to authenticate. +2. After verification/authentication, Hivesigner redirects the user back to the website or application with an **access token**. +3. The access token is used by the website or application to sign and broadcast transactions on the blockchain. + +For more detailed instructions, please follow the [HiveSigner documentation](https://docs.hivesigner.com/). +HiveSigner SDK: [https://www.npmjs.com/package/hivesigner](https://www.npmjs.com/package/hivesigner) + +#### HiveKeychain + +Hive Keychain is an extension for accessing Hive-enabled distributed applications, or "dApps", in your Chromium or Firefox browser! + +**Application side** +1. Send a handshake to make sure the extension is installed in the browser. +2. Decrypt a message encrypted by a Hive account private key (commonly used for "logging in"). +3. Create and sign a transaction. +4. Broadcast the transaction. + +**User side** +1. Install the Keychain browser extension and import accounts. +2. On the Login/Authentication popup from the website/application, verify the message and sign with the selected account. +3. The signature is used by the website/application to sign transactions going forward; every transaction should be signed by the user. + +For more detailed instructions, please follow the [HiveKeychain documentation](https://github.com/hive-keychain/hive-keychain-extension/blob/master/documentation/README.md). +HiveKeychain SDK: [https://www.npmjs.com/package/keychain-sdk](https://www.npmjs.com/package/keychain-sdk) + +#### HiveAuth + +HiveAuth is a decentralized solution for any application (either web, desktop or mobile) to easily authenticate +users without asking them to provide any password or private key. + +**Application side** +1.
Open a Websocket connection with HAS server. +2. Generate unique auth_key for each user account every time they Login/Authenticate. +3. After user authenticates, auth_key used for broadcasting transactions. + +**User side** +1. Install wallet applications that support Hive Auth. +2. On Login/Authentication popup from website/application, verify message with selected account. +3. Unique auth key generated by application for user account used for signing transaction going forward, every transaction should be signed by user. + +For more detailed instruction please follow [HiveAuth documentation](https://docs.hiveauth.com/). + diff --git a/_quickstart/choose_library.md b/_quickstart/choose_library.md index aea0cb2730b839ca440ecd74bdc40cc990902f05..99398e78c87f39bb544f564e71253b41b46a7cb3 100644 --- a/_quickstart/choose_library.md +++ b/_quickstart/choose_library.md @@ -1,12 +1,15 @@ --- title: SDK Libraries -position: 1 +position: 4 exclude: true --- +#### Software development kits -Getting started to develop robust and feature rich **Hive** applications couldn't be easier. Accessing hive data is easy from various options depending on your infrastructure and objectives. +Accessing and interacting with Hive data is easy from various options depending on your infrastructure and objectives. -Building a web3 app is a breeze with the [JavaScript, check related tutorials]({{ '/tutorials/#tutorials-javascript' | relative_url }}). There is also a [Python tutorials]({{ '/tutorials/#tutorials-python' | relative_url }}) available, [hive.blog]({{ '/services/#services-hive-blog' | relative_url }}), as well as many [opensource projects]({{ '/resources/#resources-overview' | relative_url }}) which could be beneficial for your Hive project. +Building a web3 app is a breeze with the [JavaScript, check related tutorials]({{ '/tutorials/#tutorials-javascript' | relative_url }}). 
+There are also [Python tutorials]({{ '/tutorials/#tutorials-python' | relative_url }}) available, [hive.blog]({{ '/services/#services-hive-blog' | relative_url }}), as well as many [opensource projects]({{ '/resources/#resources-overview' | relative_url }}) +which could be beneficial for your Hive project. --- diff --git a/_quickstart/fetch_broadcast.md b/_quickstart/fetch_broadcast.md new file mode 100644 index 0000000000000000000000000000000000000000..ff174cb842170b9f7c8f181d750b1c9f8b1774b2 --- /dev/null +++ b/_quickstart/fetch_broadcast.md @@ -0,0 +1,19 @@ +--- +title: Get and Set +position: 5 +exclude: true +--- +#### Fetching data + +Fetching blockchain data with the help of SDK libraries couldn't be simpler. Node.js and Python libraries +help any developer quickly access blockchain data and do analysis or create apps using that data. SDKs by default utilize +JSON-RPC to make requests to Hive nodes. The community has created [REST API alternatives](https://hivexplorer.com/api-docs) as well, which can easily be integrated +with any app on any framework or application. + +#### Broadcast data + +Broadcasting, or "Set", i.e. modifying blockchain data, can also be done directly with the above SDK libraries. Broadcasting or making +any modification to an account requires the user's private key. Using [Authentication services]({{ '/quickstart/#quickstart-authentication' | relative_url }}) is highly recommended in these use cases. + +By utilizing authentication services, you can give users more confidence, assuring them that their keys are safe. +They can securely interact with your application, website or service.
diff --git a/_quickstart/hive_full_nodes.md b/_quickstart/hive_full_nodes.md index c1ca0152792c4aebcbc78256f960373993beed3f..c06b3e35f383bbaf727d105872423ef51ab5b45c 100644 --- a/_quickstart/hive_full_nodes.md +++ b/_quickstart/hive_full_nodes.md @@ -1,14 +1,14 @@ --- title: Hive Nodes -position: 2 +position: 1 exclude: true --- -Applications that interface directly with the Hive blockchain will need to connect to a `hived` node. Developers may choose to use one of the public API nodes that are available, or run their own instance of a node. +Applications that interface directly with the Hive blockchain will need to connect to a `Hive` node. Developers may choose to use one of the public API nodes that are available, or run their own instance of a node. ### Public Nodes -Although `hived` fully supports WebSockets (`wss://` and `ws://`) public nodes typically do not. All nodes listed use HTTPS (`https://`). If you require WebSockets for your solutions, please consider setting up your own `hived` node or proxy WebSockets to HTTPS using [lineman](https://gitlab.syncad.com/hive/lineman). +All nodes listed use HTTPS (`https://`). If you require WebSockets for your solutions, please consider setting up your own `hived` node or proxy WebSockets to HTTPS using [lineman](https://gitlab.syncad.com/hive/lineman). <div id="report"> <table> @@ -97,71 +97,96 @@ Although `hived` fully supports WebSockets (`wss://` and `ws://`) public nodes t The simplest way to get started is by deploying a pre-built dockerized container. -##### Dockerized p2p Node +**System Requirements** -To install a witness or seed node: +We assume the base system will be running at least Ubuntu 22.04 (jammy). Everything will likely work with later +versions of Ubuntu. IMPORTANT UPDATE: experiments have shown 20% better API performance when running U23.10, so this +latter version is recommended over Ubuntu 22 as a hosting OS. 
-```bash -git clone https://github.com/someguy123/hive-docker.git -cd hive-docker -# If you don't already have a docker installation, this will install it for you -./run.sh install_docker +For a mainnet API node, we recommend: -# This downloads/updates the low-memory docker image for Hive -./run.sh install +* At least 32GB of memory. If you have 64GB, it will improve the time it takes to sync from scratch, but +it should make less of a difference if you're starting from a mostly-synced HAF node (i.e., +restoring a recent ZFS snapshot) +* 4TB of NVMe storage + * Hive block log & shared memory: 500GB + * Base HAF database: 3.5T (before 2x lz4 compression) + * Hivemind database: 0.65T (before 2x lz4 compression) + * base HAF + Hivemind: 2.14T (compressed) + * HAF Block Explorer: ~0.2T -# If you are a witness, you need to adjust the configuration as needed -# e.g. witness name, private key, logging config, turn off p2p-endpoint etc. -# If you're running a seed, then don't worry about the config, it will just work -nano data/witness_node_data_dir/config.ini +#### Running Hive node with Docker -# (optional) Setting the .env file up (see the env settings section of this readme) -# will help you to adjust settings for hive-in-a-box -nano .env +**Install ZFS support** -# Once you've configured your server, it's recommended to download the block log, as replays can be -# faster than p2p download -./run.sh dlblocks +We strongly recommend running your HAF instance on a ZFS filesystem, and this documentation assumes you will be running +ZFS. Its compression and snapshot features are particularly useful when running a HAF node. +We intend to publish ZFS snapshots of fully-synced HAF nodes that can downloaded to get a HAF node up & running quickly, +avoiding multi-day replay times. -# You'll also want to set the shared memory size (use sudo if not logged in as root). -# Adjust 64G to whatever size is needed for your type of server and make sure to leave growth room. 
-# Please be aware that the shared memory size changes constantly. Ask in a witness chatroom if you're unsure. -./run.sh shm_size 64G - -# Then after you've downloaded the blockchain, you can start hived in replay mode -./run.sh replay -# If you DON'T want to replay, use "start" instead -./run.sh start ``` +sudo apt install zfsutils-linux +``` + +**Install Docker** + +Follow official guide [https://docs.docker.com/engine/install/](https://docs.docker.com/engine/install/). + +**Create a ZFS pool** -You may want to persist the /dev/shm size (shared memory) across reboots. To do this, you can edit `/etc/fstab`, please be very careful, as any mistakes in this file will cause your system to become unbootable. +Create your ZFS pool if necessary. HAF requires at least 4TB of space, and 2TB NVMe drives are readily available, +so we typically construct a pool striping data across several 2TB drives. If you have three or four drives, you will +get somewhat better read/write performance, and the extra space can come in handy. +To create a pool named "haf-pool" using the first two NVMe drives in your system, use a command like: -##### Dockerized Full Node +``` +sudo zpool create haf-pool /dev/nvme0n1 /dev/nvme1n1 +``` +If you name your ZFS pool something else, configure the name in the environment file, as described in the next section. +Note: By default, ZFS tries to detect your disk's actual sector size, but it often gets it wrong for modern NVMe drives, +which will degrade performance due to having to write the same sector multiple times. If you don't know the actual +sector size, we recommend forcing the sector size to 8k by specifying +setting ashift=13 (e.g., zfs create -o ashift=13 haf-pool /dev....) + +**Configure your environment** -To install a full RPC node - follow the same steps as above, but use `install_full` instead of `install`. 
+Clone HAF API Node repository from here [https://github.com/openhive-network/haf_api_node](https://github.com/openhive-network/haf_api_node) +Make a copy of the file .env.example and customize it for your system. This file contains configurable parameters for +things like directories versions of hived, HAF, and associated tools +The docker compose command will automatically read the file named .env. If you want to keep multiple configurations, +you can give your environment files different names like .env.dev and .env.prod, then explicitly specify the filename +when running docker compose: `docker compose --env-file=.env.dev ...` -Remember to adjust the config, you'll need a higher shared memory size (potentially up to 1 TB), and various plugins. +**Set up ZFS filesystems** -For handling requests to your full node in docker, I recommend spinning up an nginx container, and connecting nginx to the Hive node using a docker network. +The HAF installation is spread across multiple ZFS datasets, which allows us to set different ZFS options for different +portions of the data. We recommend that most nodes keep the default datasets in order to enable easy sharing of snapshots. -Example: +**Initializing from scratch** +If you're starting from scratch, after you've created your zpool and configured its name in the .env file as described +above, run: ``` -docker network create rpc_default -# Assuming your RPC container is called "rpc1" instead of witness/seed -docker network connect rpc_default rpc1 -docker network connect rpc_default nginx +sudo ./create_zfs_datasets.sh ``` -Nginx will now be able to access the container RPC1 via `http://rpc1:8090` (assuming 8090 is the RPC port in your config). Then you can set up SSL and container port forwarding as needed for nginx. +to create and mount the datasets. +By default, the dataset holding most of the database storage uses zfs compression. 
The dataset for the blockchain data +directory (which holds the block_log for hived and the shared_memory.bin file) is not compressed because hived directly +manages compression of the block_log file. +If you have a LOT of nvme storage (e.g. 6TB+), you can get better API performance at the cost of disk storage by +disabling ZFS compression on the database dataset, but for most nodes this isn't recommended. -##### Customized Docker Node +**Assisted startup** + +``` +./assisted_startup.sh +``` -If the above options do not meet your needs, refer to Hive-in-a-box by [@someguy123](https://hive.blog/@someguy123): +Depending on your environment variables, assisted start up script will quickly bootstrap the process. -[https://github.com/someguy123/hive-docker](https://github.com/someguy123/hive-docker) -##### Building Without Docker +#### Building Without Docker Full non-docker steps can be reviewed here: @@ -169,20 +194,54 @@ Full non-docker steps can be reviewed here: ### Syncing blockchain -Normally syncing blockchain starts from very first, `0` genesis block. It might take long time to catch up with live network, because it connects to various p2p nodes in the Hive network and requests blocks from 0 to head block. It stores blocks in block log file and builds up the current state in the shared memory file. But there is a way to bootstrap syncing by using trusted `block_log` file. The block log is an external append only log of the blocks. It contains blocks that are only added to the log after they are irreversible because the log is append only. +**Initializing from a snapshot** + +If you're starting with one of our snapshots, the process of restoring the snapshots will create the correct +datasets with the correct options set. 
+First, download the snapshot file from: [https://gtg.openhive.network/get/snapshot/](https://gtg.openhive.network/get/snapshot/) +Since these snapshots are huge, it's best to download the snapshot file to a different disk (a magnetic +HDD will be fine for this) that has enough free space for the snapshot first, then restore it to the ZFS pool. +This lets you easily resume the download if your transfer is interrupted. If you download directly to +the ZFS pool, any interruption would require you to start the download from the beginning. +``` +wget -c https://whatever.net/snapshot_filename +``` + + +If the transfer gets interrupted, run the same command again to resume. +Then, to restore the snapshot, run: +``` +sudo zfs recv -d -v haf-pool < snapshot_filename +``` + +**Replay with blocklog** + +Normally syncing blockchain starts from very first, `0` genesis block. It might take long time to catch up with live +network, because it connects to various p2p nodes in the Hive network and requests blocks from 0 to head block. +It stores blocks in block log file and builds up the current state in the shared memory file. But there is a way to +bootstrap syncing by using trusted `block_log` file. The block log is an external append only log of the blocks. +It contains blocks that are only added to the log after they are irreversible because the log is append only. -Trusted block log file helps to download blocks faster. Various operators provide public block log file which can be downloaded from: +Trusted block log file helps to download blocks faster. Various operators provide public block log file which can be +downloaded from: * [https://files.privex.io/hive/](https://files.privex.io/hive/) * [https://gtg.openhive.network/get/blockchain/block_log](https://gtg.openhive.network/get/blockchain/block_log) -Both `block_log` files updated periodically, as of March 2021 uncompressed `block_log` file size ~350 GB. 
(Docker container on `stable` branch of Hive source code has option to use `USE_PUBLIC_BLOCKLOG=1` to download latest block log and start Hive node with replay.) +Both `block_log` files are updated periodically, as of March 2021 uncompressed `block_log` file size ~350 GB. +(Docker container on `stable` branch of Hive source code has option to use `USE_PUBLIC_BLOCKLOG=1` to download latest +block log and start Hive node with replay.) -Block log should be place in `blockchain` directory below `data_dir` and node should be started with `--replay-blockchain` to ensure block log is valid and continue to sync from the point of snapshot. Replay uses the downloaded block log file to build up the shared memory file up to the highest block stored in that snapshot and then continues with sync up to the head block. +Block log should be placed in `blockchain` directory below `data_dir` and node should be started with +`--replay-blockchain` to ensure block log is valid and continue to sync from the point of snapshot. +Replay uses the downloaded block log file to build up the shared memory file up to the highest block stored in that +snapshot and then continues with sync up to the head block. -Replay helps to sync blockchain in much faster rate, but as blockchain grows in size replay might also take some time to verify blocks. +Replay helps to sync blockchain at a much faster rate, but as blockchain grows in size replay might also take some time +to verify blocks. -There is another [trick which might help]({{ 'https://github.com/steemit/steem/issues/2391' | archived_url }}) with faster sync/replay on smaller equipped servers: +There is another [trick which might help]({{ 'https://github.com/steemit/steem/issues/2391' | archived_url }}) with +faster sync/replay on smaller equipped servers: ``` while : @@ -192,11 +251,13 @@ do done ``` -Above bash script drops `block_log` from the OS cache, leaving more memory free for backing the blockchain database. 
It might also help while running live, but measurement would be needed to determine this. +Above bash script drops `block_log` from the OS cache, leaving more memory free for backing the blockchain database. +It might also help while running live, but measurement would be needed to determine this. ##### Few other tricks that might help: -For Linux users, virtual memory writes dirty pages of the shared file out to disk more often than is optimal which results in hived being slowed down by redundant IO operations. These settings are recommended to optimize reindex time. +For Linux users, virtual memory writes dirty pages of the shared file out to disk more often than is optimal which +results in hived being slowed down by redundant IO operations. These settings are recommended to optimize reindex time. ``` echo 75 | sudo tee /proc/sys/vm/dirty_background_ratio diff --git a/_quickstart/testnet.md b/_quickstart/testnet.md index 4455e458fe999b4d3093fb2f893e081e6148ef0e..fd1481a809992553df6a71fce1eb5b463b859ed9 100644 --- a/_quickstart/testnet.md +++ b/_quickstart/testnet.md @@ -1,6 +1,6 @@ --- title: Hive Testnet -position: 3 +position: 2 exclude: true --- diff --git a/_resources/hivesigner_libs.md b/_resources/hivesigner_libs.md index 01a0d0b15e3d797c54235b0cf0cb141f3072abba..b40c928bc1b8127a436edee7f0de2f9c105a91d5 100644 --- a/_resources/hivesigner_libs.md +++ b/_resources/hivesigner_libs.md @@ -10,7 +10,7 @@ If you're wondering what Hivesigner is, go [here]({{ '/services/#services-hivesi --- -**Hivesigner SDK** - [https://github.com/ledgerconnect/hivesigner.js](https://github.com/ledgerconnect/hivesigner.js) +**Hivesigner SDK** - [https://github.com/ecency/hivesigner-sdk](https://github.com/ecency/hivesigner-sdk) An official javascript library for utilizing Hivesigner. 
diff --git a/_services/hivesigner.md b/_services/hivesigner.md index 7c441ecdee17ac4e4f1bd466388ff7a56cd43d58..2c517b23b5e45a4ba643e18d264121cb3d83c41d 100644 --- a/_services/hivesigner.md +++ b/_services/hivesigner.md @@ -23,11 +23,11 @@ Simplified, the process includes the following steps: 3. The user is redirected to the application redirect URI along with the access token Once the application has an access token, it may use the token to access the user's account via the API, limited to the scope of access, until the token expires or is revoked. -A full breakdown of OAuth2 and how it applies to Hive and Hivesigner can be found [here](https://github.com/ledgerconnect/hivesigner/wiki/OAuth-2#code-authorization-flow). +A full breakdown of OAuth2 and how it applies to Hive and Hivesigner can be found [here](https://docs.hivesigner.com). **Useful Links** -* [Hivesigner Repo](https://github.com/ledgerconnect/hivesigner) +* [Hivesigner Repo](https://github.com/ecency/hivesigner-ui) * [Community Resources]({{ '/resources/#resources-hivesigner-libs' | relative_url }}) * [Hivesigner docs](https://docs.hivesigner.com) diff --git a/_tutorials-javascript/hivesigner.md b/_tutorials-javascript/hivesigner.md index d7bd53744bd0965574ec79f579fa4abb318a9450..953b222a8d6d90129a5d3193005953eba375dfc4 100644 --- a/_tutorials-javascript/hivesigner.md +++ b/_tutorials-javascript/hivesigner.md @@ -22,7 +22,7 @@ Some other calls that require an access token (or login) are: * Follow * Reblog -Learn more about [Hivesigner operations here](https://github.com/ledgerconnect/hivesigner.js) +Learn more about [Hivesigner operations here](https://github.com/ecency/hivesigner-sdk) ## Steps diff --git a/images/hive-dev-structure.png b/images/hive-dev-structure.png new file mode 100644 index 0000000000000000000000000000000000000000..2e39942cb5f3389eae00a57d24ab2a97ec0a530f Binary files /dev/null and b/images/hive-dev-structure.png differ