- {
- "success": true,
- "payload": {
- "value": {
- "id": "5a3941347b19",
- "versionId": "a87d3649ca12",
- "creatorId": "14497d5e4e89",
- "homeCollectionId": "",
- "title": "How to build and deploy a Question-Answering AI web app.",
- "detectedLanguage": "en",
- "latestVersion": "a87d3649ca12",
- "latestPublishedVersion": "a87d3649ca12",
- "hasUnpublishedEdits": true,
- "latestRev": 546,
- "createdAt": 1647691133850,
- "updatedAt": 1647976400524,
- "acceptedAt": 0,
- "firstPublishedAt": 1647701604545,
- "latestPublishedAt": 1647704085863,
- "vote": false,
- "experimentalCss": "",
- "displayAuthor": "",
- "content": {
- "subtitle": "Building a Question-Answer bot with pre-trained model from Hugging face and deploying using Gradio.",
- "bodyModel": {
- "paragraphs": [{
- "name": "9de6",
- "type": 3,
- "text": "How to build and deploy a Question-Answering AI web app.",
- "markups": []
- }, {
- "name": "1736",
- "type": 1,
- "text": "Building a Question-Answer bot with pre-trained model from Hugging face and deploying using Gradio.",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 99
- }, {
- "type": 2,
- "start": 0,
- "end": 99
- }]
- }, {
- "name": "aad7",
- "type": 4,
- "text": "Image credit: Google photos",
- "markups": [],
- "layout": 1,
- "metadata": {
- "id": "1*1QLDW4QsIxlKKsrWmsCRbQ.png",
- "originalWidth": 540,
- "originalHeight": 360,
- "isFeatured": true
- }
- }, {
- "name": "c823",
- "type": 1,
- "text": "In this article, I’m going to show how to build a simple question-answering bot in python using pre-trained model from hugging face and deploying it as a web app using Gradio.",
- "markups": [{
- "type": 3,
- "start": 119,
- "end": 131,
- "href": "https://huggingface.co/",
- "title": "",
- "rel": "",
- "anchorType": 0
- }]
- }, {
- "name": "2d29",
- "type": 1,
- "text": "Here is the Github Repository containing the codes.",
- "markups": [{
- "type": 3,
- "start": 12,
- "end": 29,
- "href": "https://github.com/christian-freshness/NLP-Web-App-deployment-using-Gradio",
- "title": "",
- "rel": "",
- "anchorType": 0
- }]
- }, {
- "name": "7a95",
- "type": 1,
- "text": "What is a Question-Answering Model?",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 35
- }]
- }, {
- "name": "7f43",
- "type": 1,
- "text": "According to wikipedia, Question answering (QA) is a computer science discipline within the fields of information retrieval and natural language processing (NLP), which is concerned with building systems that automatically answer questions posed by humans in a natural language.",
- "markups": [{
- "type": 3,
- "start": 13,
- "end": 22,
- "href": "https://en.wikipedia.org/wiki/Question_answering#:~:text=Question%20answering%20(QA)%20is%20a,humans%20in%20a%20natural%20language.",
- "title": "",
- "rel": "",
- "anchorType": 0
- }, {
- "type": 1,
- "start": 172,
- "end": 277
- }]
- }, {
- "name": "f3f2",
- "type": 1,
- "text": "Question-Answering Models are machine or deep learning models that can answer questions given some context, and sometimes without any context (e.g. open-domain QA). They can extract answer phrases from paragraphs, paraphrase the answer generatively, or choose one option out of a list of given options, and so on.",
- "markups": [{
- "type": 1,
- "start": 30,
- "end": 141
- }]
- }, {
- "name": "8bbb",
- "type": 13,
- "text": "Let’s get straight to building",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 30
- }]
- }, {
- "name": "6fb3",
- "type": 1,
- "text": "Stack: Tensorflow, Transformers, Huggingface, Gradio, huggingface spaces.",
- "markups": [{
- "type": 3,
- "start": 33,
- "end": 44,
- "href": "https://huggingface.co/",
- "title": "",
- "rel": "",
- "anchorType": 0
- }, {
- "type": 3,
- "start": 54,
- "end": 72,
- "href": "https://huggingface.co/spaces",
- "title": "",
- "rel": "",
- "anchorType": 0
- }]
- }, {
- "name": "7da9",
- "type": 1,
- "text": "Install and import dependencies",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 31
- }]
- }, {
- "name": "930f",
- "type": 1,
- "text": "To begin, we are going to install the needed dependencies",
- "markups": []
- }, {
- "name": "a784",
- "type": 8,
- "text": "!pip install tensorflow\n!pip install transformers",
- "markups": [{
- "type": 2,
- "start": 23,
- "end": 24
- }]
- }, {
- "name": "fbeb",
- "type": 1,
- "text": "importing",
- "markups": []
- }, {
- "name": "846e",
- "type": 8,
- "text": "import tensorflow as tf\nimport transformers\nfrom transformers import pipeline",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 6
- }, {
- "type": 1,
- "start": 18,
- "end": 20
- }, {
- "type": 1,
- "start": 24,
- "end": 48
- }, {
- "type": 1,
- "start": 62,
- "end": 68
- }]
- }, {
- "name": "f449",
- "type": 1,
- "text": "Import Model",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 12
- }]
- }, {
- "name": "e236",
- "type": 1,
- "text": "Here, we are going to import and download the pre-trained Question-answering model from Hugging Face. First, we are going to import the model class and the tokenizer.",
- "markups": []
- }, {
- "name": "c104",
- "type": 8,
- "text": "from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 4
- }, {
- "type": 1,
- "start": 18,
- "end": 24
- }]
- }, {
- "name": "94d0",
- "type": 1,
- "text": "Now we are going to install and setup the model and the tokenizer.",
- "markups": []
- }, {
- "name": "d7e7",
- "type": 8,
- "text": "model = TFAutoModelForQuestionAnswering.from_pretrained(\"bert-large-uncased-whole-word-masking-finetuned-squad\",return_dict=False)",
- "markups": [{
- "type": 1,
- "start": 6,
- "end": 7
- }, {
- "type": 1,
- "start": 39,
- "end": 40
- }, {
- "type": 1,
- "start": 123,
- "end": 129
- }]
- }, {
- "name": "7e45",
- "type": 8,
- "text": "tokenizer = AutoTokenizer.from_pretrained(\"bert-large-uncased-whole-word-masking-finetuned-squad\")",
- "markups": [{
- "type": 1,
- "start": 10,
- "end": 11
- }, {
- "type": 1,
- "start": 25,
- "end": 26
- }]
- }, {
- "name": "e8e6",
- "type": 8,
- "text": "nlp = pipeline(\"question-answering\", model=model, tokenizer=tokenizer)",
- "markups": [{
- "type": 1,
- "start": 4,
- "end": 5
- }, {
- "type": 1,
- "start": 42,
- "end": 43
- }, {
- "type": 1,
- "start": 59,
- "end": 60
- }]
- }, {
- "name": "5d9a",
- "type": 1,
- "text": "Above we got the name of the model in Hugging face which is the “bert-large-uncased-whole-word-masking-finetuned-squad”",
- "markups": []
- }, {
- "name": "e7cc",
- "type": 1,
- "text": "Next, we instantiated the tokenizer and the model itself on the model name.",
- "markups": []
- }, {
- "name": "e85b",
- "type": 1,
- "text": "Testing the model",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 17
- }]
- }, {
- "name": "29f2",
- "type": 4,
- "text": "",
- "markups": [],
- "layout": 1,
- "metadata": {
- "id": "1*pmquHb1iK-_NlRj_phcj1Q.png",
- "originalWidth": 796,
- "originalHeight": 234
- }
- }, {
- "name": "3002",
- "type": 13,
- "text": "Deploying The Model as a Web App using Gradio",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 45
- }]
- }, {
- "name": "e99a",
- "type": 1,
- "text": "Here we are going to deploy our model using Gradio.",
- "markups": []
- }, {
- "name": "900c",
- "type": 1,
- "text": "What is Gradio?",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 15
- }, {
- "type": 2,
- "start": 0,
- "end": 15
- }]
- }, {
- "name": "3b64",
- "type": 1,
- "text": "Gradio is a GUI library that allows you to create customizable GUI components for your Machine Learning model. To know more about Gradio u can check the Gradio website",
- "markups": [{
- "type": 3,
- "start": 153,
- "end": 167,
- "href": "https://gradio.app/",
- "title": "",
- "rel": "",
- "anchorType": 0
- }]
- }, {
- "name": "5917",
- "type": 1,
- "text": "Installing Gradio",
- "markups": []
- }, {
- "name": "039b",
- "type": 8,
- "text": "!pip install gradio",
- "markups": []
- }, {
- "name": "3ab4",
- "type": 1,
- "text": "Import",
- "markups": []
- }, {
- "name": "7987",
- "type": 8,
- "text": "import gradio as gr",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 6
- }, {
- "type": 1,
- "start": 14,
- "end": 16
- }]
- }, {
- "name": "1b5d",
- "type": 1,
- "text": "Creating the function for Gradio",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 32
- }]
- }, {
- "name": "2327",
- "type": 1,
- "text": "To understand why we have to write a function, you must first understand that gradio builds GUI components for our Machine Learning model based on the function. The function provides a way for gradio to get input from users and pass it on to the ML model which will then process it and then pass it back to gradio which then passes the result out.",
- "markups": []
- }, {
- "name": "f277",
- "type": 8,
- "text": "# creating the function\ndef func(context, question):\n result = nlp(question = question, context=context)\n return result['answer']",
- "markups": [{
- "type": 1,
- "start": 24,
- "end": 27
- }, {
- "type": 1,
- "start": 62,
- "end": 63
- }, {
- "type": 1,
- "start": 77,
- "end": 78
- }, {
- "type": 1,
- "start": 96,
- "end": 97
- }, {
- "type": 1,
- "start": 108,
- "end": 114
- }, {
- "type": 2,
- "start": 0,
- "end": 23
- }]
- }, {
- "name": "acd4",
- "type": 1,
- "text": "Now we are going to create our Web App interface using Gradio",
- "markups": []
- }, {
- "name": "e3e3",
- "type": 8,
- "text": "# creating the interface\napp = gr.Interface(fn=func, inputs = ['textbox', 'text'], outputs = 'textbox', title = 'Question Answering bot', theme = 'dark-grass', description = 'Input context and question, then get answers!')",
- "markups": [{
- "type": 1,
- "start": 29,
- "end": 30
- }, {
- "type": 1,
- "start": 33,
- "end": 34
- }, {
- "type": 1,
- "start": 46,
- "end": 47
- }, {
- "type": 1,
- "start": 60,
- "end": 61
- }, {
- "type": 1,
- "start": 91,
- "end": 92
- }, {
- "type": 1,
- "start": 110,
- "end": 111
- }, {
- "type": 1,
- "start": 144,
- "end": 145
- }, {
- "type": 1,
- "start": 172,
- "end": 173
- }, {
- "type": 2,
- "start": 0,
- "end": 24
- }]
- }, {
- "name": "0007",
- "type": 1,
- "text": "launching the app",
- "markups": []
- }, {
- "name": "124d",
- "type": 8,
- "text": "# launching the app\napp.launch(inline=False)",
- "markups": [{
- "type": 1,
- "start": 23,
- "end": 24
- }, {
- "type": 1,
- "start": 37,
- "end": 43
- }, {
- "type": 2,
- "start": 0,
- "end": 19
- }]
- }, {
- "name": "fc56",
- "type": 4,
- "text": "The app interface",
- "markups": [],
- "layout": 1,
- "metadata": {
- "id": "1*8waVwAwRzn6tYX4NG19rRQ.png",
- "originalWidth": 1281,
- "originalHeight": 565
- }
- }, {
- "name": "cded",
- "type": 1,
- "text": "This is the link to the app hosted on hugging face spaces, Q-A bot",
- "markups": [{
- "type": 3,
- "start": 59,
- "end": 66,
- "href": "https://huggingface.co/spaces/AjulorC/question_answering_bot_deployed_with_Gradio",
- "title": "",
- "rel": "noopener",
- "anchorType": 0
- }]
- }, {
- "name": "2464",
- "type": 1,
- "text": "Conclusion",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 10
- }]
- }, {
- "name": "bba4",
- "type": 1,
- "text": "In this article, we have been able to build a natural language processing question-answer model using a pre-trained model from huggingface and deployed the model as a web app using Gradio.",
- "markups": []
- }],
- "sections": [{
- "name": "d737",
- "startIndex": 0
- }, {
- "name": "49cc",
- "startIndex": 3
- }, {
- "name": "dddc",
- "startIndex": 26
- }]
- },
- "postDisplay": {
- "coverless": true
- }
- },
- "virtuals": {
- "allowNotes": true,
- "previewImage": {
- "imageId": "1*1QLDW4QsIxlKKsrWmsCRbQ.png",
- "filter": "",
- "backgroundSize": "",
- "originalWidth": 540,
- "originalHeight": 360,
- "strategy": "resample",
- "height": 0,
- "width": 0
- },
- "wordCount": 566,
- "imageCount": 3,
- "readingTime": 2.6858490566037734,
- "subtitle": "Building a Question-Answer bot with pre-trained model from Hugging face and deploying using Gradio.",
- "userPostRelation": {
- "userId": "bc20856a2347",
- "postId": "5a3941347b19",
- "readAt": 0,
- "readLaterAddedAt": 0,
- "votedAt": 0,
- "collaboratorAddedAt": 0,
- "notesAddedAt": 0,
- "subscribedAt": 0,
- "lastReadSectionName": "",
- "lastReadVersionId": "",
- "lastReadAt": 0,
- "lastReadParagraphName": "",
- "lastReadPercentage": 0,
- "viewedAt": 1647999897359,
- "presentedCountInResponseManagement": 0,
- "clapCount": 0,
- "seriesUpdateNotifsOptedInAt": 0,
- "queuedAt": 0,
- "seriesFirstViewedAt": 0,
- "presentedCountInStream": 1,
- "seriesLastViewedAt": 0,
- "audioProgressSec": 0
- },
- "usersBySocialRecommends": [],
- "noIndex": false,
- "recommends": 2,
- "isBookmarked": false,
- "tags": [{
- "slug": "machine-learning",
- "name": "Machine Learning",
- "postCount": 185272,
- "metadata": {
- "postCount": 185272,
- "coverImage": {
- "id": "1*pLtutNvjMO_7hE0iCmwb4Q.png",
- "originalWidth": 883,
- "originalHeight": 714
- }
- },
- "type": "Tag"
- }, {
- "slug": "deep-learning",
- "name": "Deep Learning",
- "postCount": 48955,
- "metadata": {
- "postCount": 48955,
- "coverImage": {
- "id": "1*bKjSfgg346CboQwUklPUdw.jpeg",
- "originalWidth": 6177,
- "originalHeight": 3522,
- "isFeatured": true
- }
- },
- "type": "Tag"
- }, {
- "slug": "artificial-intelligence",
- "name": "Artificial Intelligence",
- "postCount": 168680,
- "metadata": {
- "postCount": 168680,
- "coverImage": {
- "id": "1*gAn_BSffVBcwCIR6bDgK1g.jpeg"
- }
- },
- "type": "Tag"
- }, {
- "slug": "technology",
- "name": "Technology",
- "postCount": 424827,
- "metadata": {
- "postCount": 424827,
- "coverImage": {
- "id": "1*yH2cmH1uhoFpR7HIseOAsw.jpeg"
- }
- },
- "type": "Tag"
- }, {
- "slug": "data-science",
- "name": "Data Science",
- "postCount": 161508,
- "metadata": {
- "postCount": 161508,
- "coverImage": {
- "id": "0*5CnyWf0j4a9daNkY",
- "originalWidth": 6016,
- "originalHeight": 4016,
- "isFeatured": true,
- "unsplashPhotoId": "ieic5Tq8YMk"
- }
- },
- "type": "Tag"
- }],
- "socialRecommendsCount": 0,
- "responsesCreatedCount": 0,
- "links": {
- "entries": [{
- "url": "https://huggingface.co/",
- "alts": [],
- "httpStatus": 200
- }, {
- "url": "https://huggingface.co/spaces/AjulorC/question_answering_bot_deployed_with_Gradio",
- "alts": [],
- "httpStatus": 200
- }, {
- "url": "https://en.wikipedia.org/wiki/Question_answering#:~:text=Question%20answering%20(QA)%20is%20a,humans%20in%20a%20natural%20language.",
- "alts": [],
- "httpStatus": 200
- }, {
- "url": "https://huggingface.co/spaces",
- "alts": [],
- "httpStatus": 200
- }, {
- "url": "https://github.com/christian-freshness/NLP-Web-App-deployment-using-Gradio",
- "alts": [],
- "httpStatus": 200
- }, {
- "url": "https://gradio.app/",
- "alts": [],
- "httpStatus": 200
- }],
- "version": "0.3",
- "generatedAt": 1647704086526
- },
- "isLockedPreviewOnly": false,
- "metaDescription": "",
- "totalClapCount": 43,
- "sectionCount": 3,
- "readingList": 0,
- "topics": [{
- "topicId": "1eca0103fff3",
- "slug": "machine-learning",
- "createdAt": 1534449726145,
- "deletedAt": 0,
- "image": {
- "id": "1*gFJS3amhZEg_z39D5EErVg@2x.png",
- "originalWidth": 2800,
- "originalHeight": 1750
- },
- "name": "Machine Learning",
- "description": "Teaching the learners.",
- "relatedTopics": [],
- "visibility": 1,
- "relatedTags": [],
- "relatedTopicIds": [],
- "seoTitle": "Machine Learning News and Articles — Medium",
- "type": "Topic"
- }, {
- "topicId": "decb52b64abf",
- "slug": "programming",
- "createdAt": 1493934116328,
- "deletedAt": 0,
- "image": {
- "id": "1*iPa136b1cGEO7lvoXg6uHQ@2x.jpeg",
- "originalWidth": 6016,
- "originalHeight": 4016
- },
- "name": "Programming",
- "description": "The good, the bad, the buggy.",
- "relatedTopics": [],
- "visibility": 1,
- "relatedTags": [],
- "relatedTopicIds": [],
- "seoTitle": "Programming News and Articles — Medium",
- "type": "Topic"
- }]
- },
- "coverless": true,
- "slug": "how-to-build-and-deploy-a-question-answering-ai-web-app",
- "translationSourcePostId": "",
- "translationSourceCreatorId": "",
- "isApprovedTranslation": false,
- "inResponseToPostId": "",
- "inResponseToRemovedAt": 0,
- "isTitleSynthesized": true,
- "allowResponses": true,
- "importedUrl": "",
- "importedPublishedAt": 0,
- "visibility": 0,
- "uniqueSlug": "how-to-build-and-deploy-a-question-answering-ai-web-app-5a3941347b19",
- "previewContent": {
- "bodyModel": {
- "paragraphs": [{
- "name": "previewImage",
- "type": 4,
- "text": "",
- "layout": 10,
- "metadata": {
- "id": "1*1QLDW4QsIxlKKsrWmsCRbQ.png",
- "originalWidth": 540,
- "originalHeight": 360,
- "isFeatured": true
- }
- }, {
- "name": "9de6",
- "type": 3,
- "text": "How to build and deploy a Question-Answering AI web app.",
- "markups": [],
- "alignment": 1
- }],
- "sections": [{
- "startIndex": 0
- }]
- },
- "isFullContent": false,
- "subtitle": "Building a Question-Answer bot with pre-trained model from Hugging face and deploying using Gradio."
- },
- "license": 0,
- "inResponseToMediaResourceId": "",
- "canonicalUrl": "https://medium.com/@christianajulor/how-to-build-and-deploy-a-question-answering-ai-web-app-5a3941347b19",
- "approvedHomeCollectionId": "",
- "isNewsletter": false,
- "newsletterId": "",
- "webCanonicalUrl": "https://medium.com/@christianajulor/how-to-build-and-deploy-a-question-answering-ai-web-app-5a3941347b19",
- "mediumUrl": "https://medium.com/@christianajulor/how-to-build-and-deploy-a-question-answering-ai-web-app-5a3941347b19",
- "migrationId": "",
- "notifyFollowers": true,
- "notifyTwitter": false,
- "notifyFacebook": false,
- "responseHiddenOnParentPostAt": 0,
- "isSeries": false,
- "isSubscriptionLocked": false,
- "seriesLastAppendedAt": 0,
- "audioVersionDurationSec": 0,
- "sequenceId": "",
- "isEligibleForRevenue": false,
- "isBlockedFromHightower": false,
- "deletedAt": 0,
- "lockedPostSource": 0,
- "hightowerMinimumGuaranteeStartsAt": 0,
- "hightowerMinimumGuaranteeEndsAt": 0,
- "featureLockRequestAcceptedAt": 0,
- "mongerRequestType": 1,
- "layerCake": 3,
- "socialTitle": "",
- "socialDek": "",
- "editorialPreviewTitle": "",
- "editorialPreviewDek": "",
- "curationEligibleAt": 0,
- "primaryTopic": {
- "topicId": "decb52b64abf",
- "slug": "programming",
- "createdAt": 1493934116328,
- "deletedAt": 0,
- "image": {
- "id": "1*iPa136b1cGEO7lvoXg6uHQ@2x.jpeg",
- "originalWidth": 6016,
- "originalHeight": 4016
- },
- "name": "Programming",
- "description": "The good, the bad, the buggy.",
- "relatedTopics": [],
- "visibility": 1,
- "relatedTags": [],
- "relatedTopicIds": [],
- "seoTitle": "Programming News and Articles — Medium",
- "type": "Topic"
- },
- "primaryTopicId": "decb52b64abf",
- "isProxyPost": false,
- "proxyPostFaviconUrl": "",
- "proxyPostProviderName": "",
- "proxyPostType": 0,
- "isSuspended": false,
- "isLimitedState": false,
- "seoTitle": "",
- "previewContent2": {
- "bodyModel": {
- "paragraphs": [{
- "name": "9de6",
- "type": 3,
- "text": "How to build and deploy a Question-Answering AI web app.",
- "markups": []
- }, {
- "name": "1736",
- "type": 1,
- "text": "Building a Question-Answer bot with pre-trained model from Hugging face and deploying using Gradio.",
- "markups": [{
- "type": 1,
- "start": 0,
- "end": 99
- }, {
- "type": 2,
- "start": 0,
- "end": 99
- }]
- }, {
- "name": "aad7",
- "type": 4,
- "text": "Image credit: Google photos",
- "markups": [],
- "layout": 1,
- "metadata": {
- "id": "1*1QLDW4QsIxlKKsrWmsCRbQ.png",
- "originalWidth": 540,
- "originalHeight": 360,
- "isFeatured": true
- }
- }, {
- "name": "c823",
- "type": 1,
- "text": "In this article, I’m going to show how to build a simple question-answering bot in python using pre-trained model from hugging face and deploying it as a web app using Gradio.",
- "markups": [{
- "type": 3,
- "start": 119,
- "end": 131,
- "href": "https://huggingface.co/",
- "title": "",
- "rel": "",
- "anchorType": 0
- }]
- }, {
- "name": "2d29",
- "type": 1,
- "text": "Here is the Github Repository…",
- "markups": [{
- "type": 3,
- "start": 12,
- "end": 29,
- "href": "https://github.com/christian-freshness/NLP-Web-App-deployment-using-Gradio",
- "title": "",
- "rel": "",
- "anchorType": 0
- }]
- }],
- "sections": [{
- "name": "d737",
- "startIndex": 0
- }, {
- "name": "49cc",
- "startIndex": 3
- }]
- },
- "isFullContent": false,
- "subtitle": "Building a Question-Answer bot with pre-trained model from Hugging face and deploying using Gradio."
- },
- "cardType": 0,
- "isDistributionAlertDismissed": false,
- "isShortform": false,
- "shortformType": 0,
- "responsesLocked": false,
- "isLockedResponse": false,
- "isPublishToEmail": false,
- "responseDistribution": 0,
- "isMarkedPaywallOnly": false,
- "type": "Post"
- },
- "mentionedUsers": [],
- "collaborators": [],
- "hideMeter": false,
- "collectionUserRelations": [],
- "mode": null,
- "references": {
- "User": {
- "14497d5e4e89": {
- "userId": "14497d5e4e89",
- "name": "Christian Ajulor",
- "username": "christianajulor",
- "createdAt": 1593514066780,
- "imageId": "1*zwa5UIRadQrv6S9yv_KKXQ.jpeg",
- "backgroundImageId": "",
- "bio": "Data Scientist 📈📉 | Machine learning Engineer",
- "twitterScreenName": "AjulorC",
- "socialStats": {
- "userId": "14497d5e4e89",
- "usersFollowedCount": 2,
- "usersFollowedByCount": 3,
- "type": "SocialStats"
- },
- "social": {
- "userId": "bc20856a2347",
- "targetUserId": "14497d5e4e89",
- "type": "Social"
- },
- "allowNotes": 1,
- "mediumMemberAt": 0,
- "isWriterProgramEnrolled": true,
- "isSuspended": false,
- "firstOpenedAndroidApp": 1629069764970,
- "isMembershipTrialEligible": true,
- "facebookDisplayName": "",
- "optInToIceland": false,
- "hasCompletedProfile": true,
- "userDismissableFlags": [12, 18, 29],
- "hasSeenIcelandOnboarding": true,
- "postSubscribeMembershipUpsellShownAt": 0,
- "languageCode": "en-us",
- "type": "User"
- }
- },
- "Social": {
- "14497d5e4e89": {
- "userId": "bc20856a2347",
- "targetUserId": "14497d5e4e89",
- "type": "Social"
- }
- },
- "SocialStats": {
- "14497d5e4e89": {
- "userId": "14497d5e4e89",
- "usersFollowedCount": 2,
- "usersFollowedByCount": 3,
- "type": "SocialStats"
- }
- }
- }
- },
- "v": 3,
- "b": "20220322-2312-root"
- }
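For reference, the Python snippets embedded in the paragraphs of the payload above assemble into the single script below. This is a sketch built only from what the post contains: the model name, the question-answering pipeline, and the Gradio interface arguments (including the "dark-grass" theme string) are copied from the article, and newer Gradio releases may handle the theme and component-name strings differently.

# Assembled from the article's code snippets (the type-8 paragraphs above).
# Install first: pip install tensorflow transformers gradio
import gradio as gr
from transformers import AutoTokenizer, TFAutoModelForQuestionAnswering, pipeline

MODEL_NAME = "bert-large-uncased-whole-word-masking-finetuned-squad"

# Download the pre-trained extractive QA model and its tokenizer from Hugging Face.
model = TFAutoModelForQuestionAnswering.from_pretrained(MODEL_NAME, return_dict=False)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

# Wrap model and tokenizer in a question-answering pipeline; calling it returns a
# dict that includes the extracted 'answer' span.
nlp = pipeline("question-answering", model=model, tokenizer=tokenizer)

def func(context, question):
    # Gradio passes the two textbox values here and displays the returned string.
    result = nlp(question=question, context=context)
    return result["answer"]

# Build the web interface: two text inputs (context, question), one text output.
app = gr.Interface(
    fn=func,
    inputs=["textbox", "text"],
    outputs="textbox",
    title="Question Answering bot",
    theme="dark-grass",  # theme string from the article; may be unavailable in newer Gradio
    description="Input context and question, then get answers!",
)

if __name__ == "__main__":
    app.launch(inline=False)

Running the script locally starts a Gradio server and prints a local URL; the article then hosts the same app on Hugging Face Spaces via the link in the post.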