LLM

8 Topics
"}},"componentScriptGroups({\"componentId\":\"custom.widget.Beta_MetaNav\"})":{"__typename":"ComponentScriptGroups","scriptGroups":{"__typename":"ComponentScriptGroupsDefinition","afterInteractive":{"__typename":"PageScriptGroupDefinition","group":"AFTER_INTERACTIVE","scriptIds":[]},"lazyOnLoad":{"__typename":"PageScriptGroupDefinition","group":"LAZY_ON_LOAD","scriptIds":[]}},"componentScripts":[]},"component({\"componentId\":\"custom.widget.Beta_Footer\"})":{"__typename":"Component","render({\"context\":{\"component\":{\"entities\":[],\"props\":{}},\"page\":{\"entities\":[],\"name\":\"TagPage\",\"props\":{},\"url\":\"https://community.f5.com/tag/LLM\"}}})":{"__typename":"ComponentRenderResult","html":"
 
 
 
 
 

\"F5 ©2024 F5, Inc. All rights reserved.
Trademarks Policies Privacy California Privacy Do Not Sell My Personal Information
"}},"componentScriptGroups({\"componentId\":\"custom.widget.Beta_Footer\"})":{"__typename":"ComponentScriptGroups","scriptGroups":{"__typename":"ComponentScriptGroupsDefinition","afterInteractive":{"__typename":"PageScriptGroupDefinition","group":"AFTER_INTERACTIVE","scriptIds":[]},"lazyOnLoad":{"__typename":"PageScriptGroupDefinition","group":"LAZY_ON_LOAD","scriptIds":[]}},"componentScripts":[]},"component({\"componentId\":\"custom.widget.Tag_Manager_Helper\"})":{"__typename":"Component","render({\"context\":{\"component\":{\"entities\":[],\"props\":{}},\"page\":{\"entities\":[],\"name\":\"TagPage\",\"props\":{},\"url\":\"https://community.f5.com/tag/LLM\"}}})":{"__typename":"ComponentRenderResult","html":" "}},"componentScriptGroups({\"componentId\":\"custom.widget.Tag_Manager_Helper\"})":{"__typename":"ComponentScriptGroups","scriptGroups":{"__typename":"ComponentScriptGroupsDefinition","afterInteractive":{"__typename":"PageScriptGroupDefinition","group":"AFTER_INTERACTIVE","scriptIds":[]},"lazyOnLoad":{"__typename":"PageScriptGroupDefinition","group":"LAZY_ON_LOAD","scriptIds":[]}},"componentScripts":[]},"component({\"componentId\":\"custom.widget.Consent_Blackbar\"})":{"__typename":"Component","render({\"context\":{\"component\":{\"entities\":[],\"props\":{}},\"page\":{\"entities\":[],\"name\":\"TagPage\",\"props\":{},\"url\":\"https://community.f5.com/tag/LLM\"}}})":{"__typename":"ComponentRenderResult","html":"
"}},"componentScriptGroups({\"componentId\":\"custom.widget.Consent_Blackbar\"})":{"__typename":"ComponentScriptGroups","scriptGroups":{"__typename":"ComponentScriptGroupsDefinition","afterInteractive":{"__typename":"PageScriptGroupDefinition","group":"AFTER_INTERACTIVE","scriptIds":[]},"lazyOnLoad":{"__typename":"PageScriptGroupDefinition","group":"LAZY_ON_LOAD","scriptIds":[]}},"componentScripts":[]},"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/community/NavbarDropdownToggle\"]})":[{"__ref":"CachedAsset:text:en_US-components/community/NavbarDropdownToggle-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"shared/client/components/common/OverflowNav\"]})":[{"__ref":"CachedAsset:text:en_US-shared/client/components/common/OverflowNav-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageView/MessageViewInline\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageView/MessageViewInline-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/customComponent/CustomComponent\"]})":[{"__ref":"CachedAsset:text:en_US-components/customComponent/CustomComponent-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/users/UserLink\"]})":[{"__ref":"CachedAsset:text:en_US-components/users/UserLink-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageSubject\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageSubject-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageBody\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageBody-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageTime\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageTime-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"shared/client/components/nodes/NodeIcon\"]})":[{"__ref":"CachedAsset:text:en_US-shared/client/components/nodes/NodeIcon-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageUnreadCount\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageUnreadCount-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageViewCount\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageViewCount-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/kudos/KudosCount\"]})":[{"__ref":"CachedAsset:text:en_US-components/kudos/KudosCount-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"components/messages/MessageRepliesCount\"]})":[{"__ref":"CachedAsset:text:en_US-components/messages/MessageRepliesCount-1743097581414"}],"cachedText({\"lastModified\":\"1743097581414\",\"locale\":\"en-US\",\"namespaces\":[\"shared/client/components/users/UserAvatar\"]})":[{"__ref":"CachedAsset:text:en_US-shared/client/components/users/UserAvatar-1743097581414"}]},"CachedAsset:pages-1742463843167":{"__typename":"CachedAsset","id":"pages-1742463843167","value":[{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetInvolved.MvpProgram","type":"COMMUNITY","urlPath":"/c/how-do-i/get-involved/mvp-program","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"BlogViewAllPostsPage","type":"BLOG","urlPath":"/category/:categoryId/blog/:boardId/all-posts/(/:after|/:before)?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CasePortalPage","type":"CASE_PORTAL","urlPath":"/caseportal","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CreateGroupHubPage","type":"GROUP_HUB","urlPath":"/groups/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CaseViewPage","type":"CASE_DETAILS","urlPath":"/case/:caseId/:caseNumber","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"InboxPage","type":"COMMUNITY","urlPath":"/inbox","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetInvolved.AdvocacyProgram","type":"COMMUNITY","urlPath":"/c/how-do-i/get-involved/advocacy-program","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetHelp.NonCustomer","type":"COMMUNITY","urlPath":"/c/how-do-i/get-help/non-customer","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HelpFAQPage","type":"COMMUNITY","urlPath":"/help","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetHelp.F5Customer","type":"COMMUNITY","urlPath":"/c/how-do-i/get-help/f5-customer","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"IdeaMessagePage","type":"IDEA_POST","urlPath":"/idea/:boardId/:messageSubject/:messageId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"IdeaViewAllIdeasPage","type":"IDEA","urlPath":"/category/:categoryId/ideas/:boardId/all-ideas/(/:after|/:before)?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"LoginPage","type":"USER","urlPath":"/signin","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"BlogPostPage","type":"BLOG","urlPath":"/category/:categoryId/blogs/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetInvolved","type":"COMMUNITY","urlPath":"/c/how-do-i/get-involved","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.Learn","type":"COMMUNITY","urlPath":"/c/how-do-i/learn","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1739501996000,"localOverride":null,"page":{"id":"Test","type":"CUSTOM","urlPath":"/custom-test-2","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ThemeEditorPage","type":"COMMUNITY","urlPath":"/designer/themes","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TkbViewAllArticlesPage","type":"TKB","urlPath":"/category/:categoryId/kb/:boardId/all-articles/(/:after|/:before)?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"OccasionEditPage","type":"EVENT","urlPath":"/event/:boardId/:messageSubject/:messageId/edit","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"OAuthAuthorizationAllowPage","type":"USER","urlPath":"/auth/authorize/allow","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"PageEditorPage","type":"COMMUNITY","urlPath":"/designer/pages","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"PostPage","type":"COMMUNITY","urlPath":"/category/:categoryId/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForumBoardPage","type":"FORUM","urlPath":"/category/:categoryId/discussions/:boardId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TkbBoardPage","type":"TKB","urlPath":"/category/:categoryId/kb/:boardId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"EventPostPage","type":"EVENT","urlPath":"/category/:categoryId/events/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"UserBadgesPage","type":"COMMUNITY","urlPath":"/users/:login/:userId/badges","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"GroupHubMembershipAction","type":"GROUP_HUB","urlPath":"/membership/join/:nodeId/:membershipType","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"MaintenancePage","type":"COMMUNITY","urlPath":"/maintenance","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"IdeaReplyPage","type":"IDEA_REPLY","urlPath":"/idea/:boardId/:messageSubject/:messageId/comments/:replyId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"UserSettingsPage","type":"USER","urlPath":"/mysettings/:userSettingsTab","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"GroupHubsPage","type":"GROUP_HUB","urlPath":"/groups","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForumPostPage","type":"FORUM","urlPath":"/category/:categoryId/discussions/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"OccasionRsvpActionPage","type":"OCCASION","urlPath":"/event/:boardId/:messageSubject/:messageId/rsvp/:responseType","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"VerifyUserEmailPage","type":"USER","urlPath":"/verifyemail/:userId/:verifyEmailToken","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"AllOccasionsPage","type":"OCCASION","urlPath":"/category/:categoryId/events/:boardId/all-events/(/:after|/:before)?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"EventBoardPage","type":"EVENT","urlPath":"/category/:categoryId/events/:boardId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TkbReplyPage","type":"TKB_REPLY","urlPath":"/kb/:boardId/:messageSubject/:messageId/comments/:replyId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"IdeaBoardPage","type":"IDEA","urlPath":"/category/:categoryId/ideas/:boardId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CommunityGuideLinesPage","type":"COMMUNITY","urlPath":"/communityguidelines","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CaseCreatePage","type":"SALESFORCE_CASE_CREATION","urlPath":"/caseportal/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TkbEditPage","type":"TKB","urlPath":"/kb/:boardId/:messageSubject/:messageId/edit","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForgotPasswordPage","type":"USER","urlPath":"/forgotpassword","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"IdeaEditPage","type":"IDEA","urlPath":"/idea/:boardId/:messageSubject/:messageId/edit","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TagPage","type":"COMMUNITY","urlPath":"/tag/:tagName","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"BlogBoardPage","type":"BLOG","urlPath":"/category/:categoryId/blog/:boardId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"OccasionMessagePage","type":"OCCASION_TOPIC","urlPath":"/event/:boardId/:messageSubject/:messageId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ManageContentPage","type":"COMMUNITY","urlPath":"/managecontent","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ClosedMembershipNodeNonMembersPage","type":"GROUP_HUB","urlPath":"/closedgroup/:groupHubId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetHelp.Community","type":"COMMUNITY","urlPath":"/c/how-do-i/get-help/community","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CommunityPage","type":"COMMUNITY","urlPath":"/","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetInvolved.ContributeCode","type":"COMMUNITY","urlPath":"/c/how-do-i/get-involved/contribute-code","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForumMessagePage","type":"FORUM_TOPIC","urlPath":"/discussions/:boardId/:messageSubject/:messageId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"IdeaPostPage","type":"IDEA","urlPath":"/category/:categoryId/ideas/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"BlogMessagePage","type":"BLOG_ARTICLE","urlPath":"/blog/:boardId/:messageSubject/:messageId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"RegistrationPage","type":"USER","urlPath":"/register","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"EditGroupHubPage","type":"GROUP_HUB","urlPath":"/group/:groupHubId/edit","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForumEditPage","type":"FORUM","urlPath":"/discussions/:boardId/:messageSubject/:messageId/edit","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ResetPasswordPage","type":"USER","urlPath":"/resetpassword/:userId/:resetPasswordToken","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TkbMessagePage","type":"TKB_ARTICLE","urlPath":"/kb/:boardId/:messageSubject/:messageId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.Learn.AboutIrules","type":"COMMUNITY","urlPath":"/c/how-do-i/learn/about-irules","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"BlogEditPage","type":"BLOG","urlPath":"/blog/:boardId/:messageSubject/:messageId/edit","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetHelp.F5Support","type":"COMMUNITY","urlPath":"/c/how-do-i/get-help/f5-support","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ManageUsersPage","type":"USER","urlPath":"/users/manage/:tab?/:manageUsersTab?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForumReplyPage","type":"FORUM_REPLY","urlPath":"/discussions/:boardId/:messageSubject/:messageId/replies/:replyId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"PrivacyPolicyPage","type":"COMMUNITY","urlPath":"/privacypolicy","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"NotificationPage","type":"COMMUNITY","urlPath":"/notifications","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"UserPage","type":"USER","urlPath":"/users/:login/:userId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HealthCheckPage","type":"COMMUNITY","urlPath":"/health","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"OccasionReplyPage","type":"OCCASION_REPLY","urlPath":"/event/:boardId/:messageSubject/:messageId/comments/:replyId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ManageMembersPage","type":"GROUP_HUB","urlPath":"/group/:groupHubId/manage/:tab?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"SearchResultsPage","type":"COMMUNITY","urlPath":"/search","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"BlogReplyPage","type":"BLOG_REPLY","urlPath":"/blog/:boardId/:messageSubject/:messageId/replies/:replyId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"GroupHubPage","type":"GROUP_HUB","urlPath":"/group/:groupHubId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TermsOfServicePage","type":"COMMUNITY","urlPath":"/termsofservice","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetHelp","type":"COMMUNITY","urlPath":"/c/how-do-i/get-help","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI.GetHelp.SecurityIncident","type":"COMMUNITY","urlPath":"/c/how-do-i/get-help/security-incident","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"CategoryPage","type":"CATEGORY","urlPath":"/category/:categoryId","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"ForumViewAllTopicsPage","type":"FORUM","urlPath":"/category/:categoryId/discussions/:boardId/all-topics/(/:after|/:before)?","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"TkbPostPage","type":"TKB","urlPath":"/category/:categoryId/kbs/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"GroupHubPostPage","type":"GROUP_HUB","urlPath":"/group/:groupHubId/:boardId/create","__typename":"PageDescriptor"},"__typename":"PageResource"},{"lastUpdatedTime":1742463843167,"localOverride":null,"page":{"id":"HowDoI","type":"COMMUNITY","urlPath":"/c/how-do-i","__typename":"PageDescriptor"},"__typename":"PageResource"}],"localOverride":false},"CachedAsset:text:en_US-components/context/AppContext/AppContextProvider-0":{"__typename":"CachedAsset","id":"text:en_US-components/context/AppContext/AppContextProvider-0","value":{"noCommunity":"Cannot find community","noUser":"Cannot find current user","noNode":"Cannot find node with id {nodeId}","noMessage":"Cannot find message with id {messageId}"},"localOverride":false},"CachedAsset:text:en_US-shared/client/components/common/Loading/LoadingDot-0":{"__typename":"CachedAsset","id":"text:en_US-shared/client/components/common/Loading/LoadingDot-0","value":{"title":"Loading..."},"localOverride":false},"User:user:-1":{"__typename":"User","id":"user:-1","uid":-1,"login":"Former Member","email":"","avatar":null,"rank":null,"kudosWeight":1,"registrationData":{"__typename":"RegistrationData","status":"ANONYMOUS","registrationTime":null,"confirmEmailStatus":false,"registrationAccessLevel":"VIEW","ssoRegistrationFields":[]},"ssoId":null,"profileSettings":{"__typename":"ProfileSettings","dateDisplayStyle":{"__typename":"InheritableStringSettingWithPossibleValues","key":"layout.friendly_dates_enabled","value":"false","localValue":"true","possibleValues":["true","false"]},"dateDisplayFormat":{"__typename":"InheritableStringSetting","key":"layout.format_pattern_date","value":"dd-MMM-yyyy","localValue":"MM-dd-yyyy"},"language":{"__typename":"InheritableStringSettingWithPossibleValues","key":"profile.language","value":"en-US","localValue":null,"possibleValues":["en-US"]}},"deleted":false},"Theme:customTheme1":{"__typename":"Theme","id":"customTheme1"},"CachedAsset:theme:customTheme1-1742463842725":{"__typename":"CachedAsset","id":"theme:customTheme1-1742463842725","value":{"id":"customTheme1","animation":{"fast":"150ms","normal":"250ms","slow":"500ms","slowest":"750ms","function":"cubic-bezier(0.07, 0.91, 0.51, 1)","__typename":"AnimationThemeSettings"},"avatar":{"borderRadius":"50%","collections":["custom"],"__typename":"AvatarThemeSettings"},"basics":{"browserIcon":{"imageAssetName":"JimmyPackets-512-1702592938213.png","imageLastModified":"1702592945815","__typename":"ThemeAsset"},"customerLogo":{"imageAssetName":"f5_logo_fix-1704824537976.svg","imageLastModified":"1704824540697","__typename":"ThemeAsset"},"maximumWidthOfPageContent":"1600px","oneColumnNarrowWidth":"800px","gridGutterWidthMd":"30px","gridGutterWidthXs":"10px","pageWidthStyle":"WIDTH_OF_PAGE_CONTENT","__typename":"BasicsThemeSettings"},"buttons":{"borderRadiusSm":"5px","borderRadius":"5px","borderRadiusLg":"5px","paddingY":"5px","paddingYLg":"7px","paddingYHero":"var(--lia-bs-btn-padding-y-lg)","paddingX":"12px","paddingXLg":"14px","paddingXHero":"42px","fontStyle":"NORMAL","fontWeight":"400","textTransform":"NONE","disabledOpacity":0.5,"primaryTextColor":"var(--lia-bs-white)","primaryTextHoverColor":"var(--lia-bs-white)","primaryTextActiveColor":"var(--lia-bs-white)","primaryBgColor":"var(--lia-bs-primary)","primaryBgHoverColor":"hsl(var(--lia-bs-primary-h), var(--lia-bs-primary-s), calc(var(--lia-bs-primary-l) * 0.85))","primaryBgActiveColor":"hsl(var(--lia-bs-primary-h), var(--lia-bs-primary-s), calc(var(--lia-bs-primary-l) * 0.7))","primaryBorder":"1px solid transparent","primaryBorderHover":"1px solid transparent","primaryBorderActive":"1px solid transparent","primaryBorderFocus":"1px solid var(--lia-bs-white)","primaryBoxShadowFocus":"0 0 0 1px var(--lia-bs-primary), 0 0 0 4px hsla(var(--lia-bs-primary-h), var(--lia-bs-primary-s), var(--lia-bs-primary-l), 0.2)","secondaryTextColor":"var(--lia-bs-gray-900)","secondaryTextHoverColor":"hsl(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), calc(var(--lia-bs-gray-900-l) * 0.95))","secondaryTextActiveColor":"hsl(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), calc(var(--lia-bs-gray-900-l) * 0.9))","secondaryBgColor":"var(--lia-bs-gray-400)","secondaryBgHoverColor":"hsl(var(--lia-bs-gray-400-h), var(--lia-bs-gray-400-s), calc(var(--lia-bs-gray-400-l) * 0.96))","secondaryBgActiveColor":"hsl(var(--lia-bs-gray-400-h), var(--lia-bs-gray-400-s), calc(var(--lia-bs-gray-400-l) * 0.92))","secondaryBorder":"1px solid transparent","secondaryBorderHover":"1px solid transparent","secondaryBorderActive":"1px solid transparent","secondaryBorderFocus":"1px solid transparent","secondaryBoxShadowFocus":"0 0 0 1px var(--lia-bs-primary), 0 0 0 4px hsla(var(--lia-bs-primary-h), var(--lia-bs-primary-s), var(--lia-bs-primary-l), 0.2)","tertiaryTextColor":"var(--lia-bs-gray-900)","tertiaryTextHoverColor":"hsl(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), calc(var(--lia-bs-gray-900-l) * 0.95))","tertiaryTextActiveColor":"hsl(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), calc(var(--lia-bs-gray-900-l) * 0.9))","tertiaryBgColor":"transparent","tertiaryBgHoverColor":"transparent","tertiaryBgActiveColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.04)","tertiaryBorder":"1px solid transparent","tertiaryBorderHover":"1px solid hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.08)","tertiaryBorderActive":"1px solid transparent","tertiaryBorderFocus":"1px solid transparent","tertiaryBoxShadowFocus":"0 0 0 1px var(--lia-bs-primary), 0 0 0 4px hsla(var(--lia-bs-primary-h), var(--lia-bs-primary-s), var(--lia-bs-primary-l), 0.2)","destructiveTextColor":"var(--lia-bs-danger)","destructiveTextHoverColor":"hsl(var(--lia-bs-danger-h), var(--lia-bs-danger-s), calc(var(--lia-bs-danger-l) * 0.95))","destructiveTextActiveColor":"hsl(var(--lia-bs-danger-h), var(--lia-bs-danger-s), calc(var(--lia-bs-danger-l) * 0.9))","destructiveBgColor":"var(--lia-bs-gray-300)","destructiveBgHoverColor":"hsl(var(--lia-bs-gray-300-h), var(--lia-bs-gray-300-s), calc(var(--lia-bs-gray-300-l) * 0.96))","destructiveBgActiveColor":"hsl(var(--lia-bs-gray-300-h), var(--lia-bs-gray-300-s), calc(var(--lia-bs-gray-300-l) * 0.92))","destructiveBorder":"1px solid transparent","destructiveBorderHover":"1px solid transparent","destructiveBorderActive":"1px solid transparent","destructiveBorderFocus":"1px solid transparent","destructiveBoxShadowFocus":"0 0 0 1px var(--lia-bs-primary), 0 0 0 4px hsla(var(--lia-bs-primary-h), var(--lia-bs-primary-s), var(--lia-bs-primary-l), 0.2)","__typename":"ButtonsThemeSettings"},"border":{"color":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.08)","mainContent":"NONE","sideContent":"NONE","radiusSm":"3px","radius":"5px","radiusLg":"9px","radius50":"100vw","__typename":"BorderThemeSettings"},"boxShadow":{"xs":"0 0 0 1px hsla(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), var(--lia-bs-gray-900-l), 0.08), 0 3px 0 -1px hsla(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), var(--lia-bs-gray-900-l), 0.08)","sm":"0 2px 4px hsla(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), var(--lia-bs-gray-900-l), 0.06)","md":"0 5px 15px hsla(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), var(--lia-bs-gray-900-l), 0.15)","lg":"0 10px 30px hsla(var(--lia-bs-gray-900-h), var(--lia-bs-gray-900-s), var(--lia-bs-gray-900-l), 0.15)","__typename":"BoxShadowThemeSettings"},"cards":{"bgColor":"var(--lia-panel-bg-color)","borderRadius":"var(--lia-panel-border-radius)","boxShadow":"var(--lia-box-shadow-xs)","__typename":"CardsThemeSettings"},"chip":{"maxWidth":"300px","height":"30px","__typename":"ChipThemeSettings"},"coreTypes":{"defaultMessageLinkColor":"var(--lia-bs-primary)","defaultMessageLinkDecoration":"none","defaultMessageLinkFontStyle":"NORMAL","defaultMessageLinkFontWeight":"400","defaultMessageFontStyle":"NORMAL","defaultMessageFontWeight":"400","forumColor":"#0C5C8D","forumFontFamily":"var(--lia-bs-font-family-base)","forumFontWeight":"var(--lia-default-message-font-weight)","forumLineHeight":"var(--lia-bs-line-height-base)","forumFontStyle":"var(--lia-default-message-font-style)","forumMessageLinkColor":"var(--lia-default-message-link-color)","forumMessageLinkDecoration":"var(--lia-default-message-link-decoration)","forumMessageLinkFontStyle":"var(--lia-default-message-link-font-style)","forumMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","forumSolvedColor":"#62C026","blogColor":"#730015","blogFontFamily":"var(--lia-bs-font-family-base)","blogFontWeight":"var(--lia-default-message-font-weight)","blogLineHeight":"1.75","blogFontStyle":"var(--lia-default-message-font-style)","blogMessageLinkColor":"var(--lia-default-message-link-color)","blogMessageLinkDecoration":"var(--lia-default-message-link-decoration)","blogMessageLinkFontStyle":"var(--lia-default-message-link-font-style)","blogMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","tkbColor":"#C20025","tkbFontFamily":"var(--lia-bs-font-family-base)","tkbFontWeight":"var(--lia-default-message-font-weight)","tkbLineHeight":"1.75","tkbFontStyle":"var(--lia-default-message-font-style)","tkbMessageLinkColor":"var(--lia-default-message-link-color)","tkbMessageLinkDecoration":"var(--lia-default-message-link-decoration)","tkbMessageLinkFontStyle":"var(--lia-default-message-link-font-style)","tkbMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","qandaColor":"#4099E2","qandaFontFamily":"var(--lia-bs-font-family-base)","qandaFontWeight":"var(--lia-default-message-font-weight)","qandaLineHeight":"var(--lia-bs-line-height-base)","qandaFontStyle":"var(--lia-default-message-link-font-style)","qandaMessageLinkColor":"var(--lia-default-message-link-color)","qandaMessageLinkDecoration":"var(--lia-default-message-link-decoration)","qandaMessageLinkFontStyle":"var(--lia-default-message-link-font-style)","qandaMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","qandaSolvedColor":"#3FA023","ideaColor":"#F3704B","ideaFontFamily":"var(--lia-bs-font-family-base)","ideaFontWeight":"var(--lia-default-message-font-weight)","ideaLineHeight":"var(--lia-bs-line-height-base)","ideaFontStyle":"var(--lia-default-message-font-style)","ideaMessageLinkColor":"var(--lia-default-message-link-color)","ideaMessageLinkDecoration":"var(--lia-default-message-link-decoration)","ideaMessageLinkFontStyle":"var(--lia-default-message-link-font-style)","ideaMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","contestColor":"#FCC845","contestFontFamily":"var(--lia-bs-font-family-base)","contestFontWeight":"var(--lia-default-message-font-weight)","contestLineHeight":"var(--lia-bs-line-height-base)","contestFontStyle":"var(--lia-default-message-link-font-style)","contestMessageLinkColor":"var(--lia-default-message-link-color)","contestMessageLinkDecoration":"var(--lia-default-message-link-decoration)","contestMessageLinkFontStyle":"ITALIC","contestMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","occasionColor":"#EE4B5B","occasionFontFamily":"var(--lia-bs-font-family-base)","occasionFontWeight":"var(--lia-default-message-font-weight)","occasionLineHeight":"var(--lia-bs-line-height-base)","occasionFontStyle":"var(--lia-default-message-font-style)","occasionMessageLinkColor":"var(--lia-default-message-link-color)","occasionMessageLinkDecoration":"var(--lia-default-message-link-decoration)","occasionMessageLinkFontStyle":"var(--lia-default-message-link-font-style)","occasionMessageLinkFontWeight":"var(--lia-default-message-link-font-weight)","grouphubColor":"#491B62","categoryColor":"#949494","communityColor":"#FFFFFF","productColor":"#949494","__typename":"CoreTypesThemeSettings"},"colors":{"black":"#000000","white":"#FFFFFF","gray100":"#F7F7F7","gray200":"#F7F7F7","gray300":"#E8E8E8","gray400":"#D9D9D9","gray500":"#CCCCCC","gray600":"#949494","gray700":"#707070","gray800":"#545454","gray900":"#333333","dark":"#545454","light":"#F7F7F7","primary":"#0C5C8D","secondary":"#333333","bodyText":"#222222","bodyBg":"#F5F5F5","info":"#1D9CD3","success":"#62C026","warning":"#FFD651","danger":"#C20025","alertSystem":"#FF6600","textMuted":"#707070","highlight":"#FFFCAD","outline":"var(--lia-bs-primary)","custom":["#C20025","#081B85","#009639","#B3C6D7","#7CC0EB","#F29A36"],"__typename":"ColorsThemeSettings"},"divider":{"size":"3px","marginLeft":"4px","marginRight":"4px","borderRadius":"50%","bgColor":"var(--lia-bs-gray-600)","bgColorActive":"var(--lia-bs-gray-600)","__typename":"DividerThemeSettings"},"dropdown":{"fontSize":"var(--lia-bs-font-size-sm)","borderColor":"var(--lia-bs-border-color)","borderRadius":"var(--lia-bs-border-radius-sm)","dividerBg":"var(--lia-bs-gray-300)","itemPaddingY":"5px","itemPaddingX":"20px","headerColor":"var(--lia-bs-gray-700)","__typename":"DropdownThemeSettings"},"email":{"link":{"color":"#0069D4","hoverColor":"#0061c2","decoration":"none","hoverDecoration":"underline","__typename":"EmailLinkSettings"},"border":{"color":"#e4e4e4","__typename":"EmailBorderSettings"},"buttons":{"borderRadiusLg":"5px","paddingXLg":"16px","paddingYLg":"7px","fontWeight":"700","primaryTextColor":"#ffffff","primaryTextHoverColor":"#ffffff","primaryBgColor":"#0069D4","primaryBgHoverColor":"#005cb8","primaryBorder":"1px solid transparent","primaryBorderHover":"1px solid transparent","__typename":"EmailButtonsSettings"},"panel":{"borderRadius":"5px","borderColor":"#e4e4e4","__typename":"EmailPanelSettings"},"__typename":"EmailThemeSettings"},"emoji":{"skinToneDefault":"#ffcd43","skinToneLight":"#fae3c5","skinToneMediumLight":"#e2cfa5","skinToneMedium":"#daa478","skinToneMediumDark":"#a78058","skinToneDark":"#5e4d43","__typename":"EmojiThemeSettings"},"heading":{"color":"var(--lia-bs-body-color)","fontFamily":"Inter","fontStyle":"NORMAL","fontWeight":"600","h1FontSize":"30px","h2FontSize":"25px","h3FontSize":"20px","h4FontSize":"18px","h5FontSize":"16px","h6FontSize":"16px","lineHeight":"1.2","subHeaderFontSize":"11px","subHeaderFontWeight":"500","h1LetterSpacing":"normal","h2LetterSpacing":"normal","h3LetterSpacing":"normal","h4LetterSpacing":"normal","h5LetterSpacing":"normal","h6LetterSpacing":"normal","subHeaderLetterSpacing":"2px","h1FontWeight":"var(--lia-bs-headings-font-weight)","h2FontWeight":"var(--lia-bs-headings-font-weight)","h3FontWeight":"var(--lia-bs-headings-font-weight)","h4FontWeight":"var(--lia-bs-headings-font-weight)","h5FontWeight":"var(--lia-bs-headings-font-weight)","h6FontWeight":"var(--lia-bs-headings-font-weight)","__typename":"HeadingThemeSettings"},"icons":{"size10":"10px","size12":"12px","size14":"14px","size16":"16px","size20":"20px","size24":"24px","size30":"30px","size40":"40px","size50":"50px","size60":"60px","size80":"80px","size120":"120px","size160":"160px","__typename":"IconsThemeSettings"},"imagePreview":{"bgColor":"var(--lia-bs-gray-900)","titleColor":"var(--lia-bs-white)","controlColor":"var(--lia-bs-white)","controlBgColor":"var(--lia-bs-gray-800)","__typename":"ImagePreviewThemeSettings"},"input":{"borderColor":"var(--lia-bs-gray-600)","disabledColor":"var(--lia-bs-gray-600)","focusBorderColor":"var(--lia-bs-primary)","labelMarginBottom":"10px","btnFontSize":"var(--lia-bs-font-size-sm)","focusBoxShadow":"0 0 0 3px hsla(var(--lia-bs-primary-h), var(--lia-bs-primary-s), var(--lia-bs-primary-l), 0.2)","checkLabelMarginBottom":"2px","checkboxBorderRadius":"3px","borderRadiusSm":"var(--lia-bs-border-radius-sm)","borderRadius":"var(--lia-bs-border-radius)","borderRadiusLg":"var(--lia-bs-border-radius-lg)","formTextMarginTop":"4px","textAreaBorderRadius":"var(--lia-bs-border-radius)","activeFillColor":"var(--lia-bs-primary)","__typename":"InputThemeSettings"},"loading":{"dotDarkColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.2)","dotLightColor":"hsla(var(--lia-bs-white-h), var(--lia-bs-white-s), var(--lia-bs-white-l), 0.5)","barDarkColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.06)","barLightColor":"hsla(var(--lia-bs-white-h), var(--lia-bs-white-s), var(--lia-bs-white-l), 0.4)","__typename":"LoadingThemeSettings"},"link":{"color":"var(--lia-bs-primary)","hoverColor":"hsl(var(--lia-bs-primary-h), var(--lia-bs-primary-s), calc(var(--lia-bs-primary-l) - 10%))","decoration":"none","hoverDecoration":"underline","__typename":"LinkThemeSettings"},"listGroup":{"itemPaddingY":"15px","itemPaddingX":"15px","borderColor":"var(--lia-bs-gray-300)","__typename":"ListGroupThemeSettings"},"modal":{"contentTextColor":"var(--lia-bs-body-color)","contentBg":"var(--lia-bs-white)","backgroundBg":"var(--lia-bs-black)","smSize":"440px","mdSize":"760px","lgSize":"1080px","backdropOpacity":0.3,"contentBoxShadowXs":"var(--lia-bs-box-shadow-sm)","contentBoxShadow":"var(--lia-bs-box-shadow)","headerFontWeight":"700","__typename":"ModalThemeSettings"},"navbar":{"position":"FIXED","background":{"attachment":null,"clip":null,"color":"var(--lia-bs-white)","imageAssetName":null,"imageLastModified":"0","origin":null,"position":"CENTER_CENTER","repeat":"NO_REPEAT","size":"COVER","__typename":"BackgroundProps"},"backgroundOpacity":0.8,"paddingTop":"15px","paddingBottom":"15px","borderBottom":"1px solid var(--lia-bs-border-color)","boxShadow":"var(--lia-bs-box-shadow-sm)","brandMarginRight":"30px","brandMarginRightSm":"10px","brandLogoHeight":"30px","linkGap":"10px","linkJustifyContent":"flex-start","linkPaddingY":"5px","linkPaddingX":"10px","linkDropdownPaddingY":"9px","linkDropdownPaddingX":"var(--lia-nav-link-px)","linkColor":"var(--lia-bs-body-color)","linkHoverColor":"var(--lia-bs-primary)","linkFontSize":"var(--lia-bs-font-size-sm)","linkFontStyle":"NORMAL","linkFontWeight":"400","linkTextTransform":"NONE","linkLetterSpacing":"normal","linkBorderRadius":"var(--lia-bs-border-radius-sm)","linkBgColor":"transparent","linkBgHoverColor":"transparent","linkBorder":"none","linkBorderHover":"none","linkBoxShadow":"none","linkBoxShadowHover":"none","linkTextBorderBottom":"none","linkTextBorderBottomHover":"none","dropdownPaddingTop":"10px","dropdownPaddingBottom":"15px","dropdownPaddingX":"10px","dropdownMenuOffset":"2px","dropdownDividerMarginTop":"10px","dropdownDividerMarginBottom":"10px","dropdownBorderColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.08)","controllerBgHoverColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.1)","controllerIconColor":"var(--lia-bs-body-color)","controllerIconHoverColor":"var(--lia-bs-body-color)","controllerTextColor":"var(--lia-nav-controller-icon-color)","controllerTextHoverColor":"var(--lia-nav-controller-icon-hover-color)","controllerHighlightColor":"hsla(30, 100%, 50%)","controllerHighlightTextColor":"var(--lia-yiq-light)","controllerBorderRadius":"var(--lia-border-radius-50)","hamburgerColor":"var(--lia-nav-controller-icon-color)","hamburgerHoverColor":"var(--lia-nav-controller-icon-color)","hamburgerBgColor":"transparent","hamburgerBgHoverColor":"transparent","hamburgerBorder":"none","hamburgerBorderHover":"none","collapseMenuMarginLeft":"20px","collapseMenuDividerBg":"var(--lia-nav-link-color)","collapseMenuDividerOpacity":0.16,"__typename":"NavbarThemeSettings"},"pager":{"textColor":"var(--lia-bs-link-color)","textFontWeight":"var(--lia-font-weight-md)","textFontSize":"var(--lia-bs-font-size-sm)","__typename":"PagerThemeSettings"},"panel":{"bgColor":"var(--lia-bs-white)","borderRadius":"var(--lia-bs-border-radius)","borderColor":"var(--lia-bs-border-color)","boxShadow":"none","__typename":"PanelThemeSettings"},"popover":{"arrowHeight":"8px","arrowWidth":"16px","maxWidth":"300px","minWidth":"100px","headerBg":"var(--lia-bs-white)","borderColor":"var(--lia-bs-border-color)","borderRadius":"var(--lia-bs-border-radius)","boxShadow":"0 0.5rem 1rem hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.15)","__typename":"PopoverThemeSettings"},"prism":{"color":"#000000","bgColor":"#f5f2f0","fontFamily":"var(--font-family-monospace)","fontSize":"var(--lia-bs-font-size-base)","fontWeightBold":"var(--lia-bs-font-weight-bold)","fontStyleItalic":"italic","tabSize":2,"highlightColor":"#b3d4fc","commentColor":"#62707e","punctuationColor":"#6f6f6f","namespaceOpacity":"0.7","propColor":"#990055","selectorColor":"#517a00","operatorColor":"#906736","operatorBgColor":"hsla(0, 0%, 100%, 0.5)","keywordColor":"#0076a9","functionColor":"#d3284b","variableColor":"#c14700","__typename":"PrismThemeSettings"},"rte":{"bgColor":"var(--lia-bs-white)","borderRadius":"var(--lia-panel-border-radius)","boxShadow":" var(--lia-panel-box-shadow)","customColor1":"#bfedd2","customColor2":"#fbeeb8","customColor3":"#f8cac6","customColor4":"#eccafa","customColor5":"#c2e0f4","customColor6":"#2dc26b","customColor7":"#f1c40f","customColor8":"#e03e2d","customColor9":"#b96ad9","customColor10":"#3598db","customColor11":"#169179","customColor12":"#e67e23","customColor13":"#ba372a","customColor14":"#843fa1","customColor15":"#236fa1","customColor16":"#ecf0f1","customColor17":"#ced4d9","customColor18":"#95a5a6","customColor19":"#7e8c8d","customColor20":"#34495e","customColor21":"#000000","customColor22":"#ffffff","defaultMessageHeaderMarginTop":"14px","defaultMessageHeaderMarginBottom":"10px","defaultMessageItemMarginTop":"0","defaultMessageItemMarginBottom":"10px","diffAddedColor":"hsla(170, 53%, 51%, 0.4)","diffChangedColor":"hsla(43, 97%, 63%, 0.4)","diffNoneColor":"hsla(0, 0%, 80%, 0.4)","diffRemovedColor":"hsla(9, 74%, 47%, 0.4)","specialMessageHeaderMarginTop":"14px","specialMessageHeaderMarginBottom":"10px","specialMessageItemMarginTop":"0","specialMessageItemMarginBottom":"10px","__typename":"RteThemeSettings"},"tags":{"bgColor":"var(--lia-bs-gray-200)","bgHoverColor":"var(--lia-bs-gray-400)","borderRadius":"var(--lia-bs-border-radius-sm)","color":"var(--lia-bs-body-color)","hoverColor":"var(--lia-bs-body-color)","fontWeight":"var(--lia-font-weight-md)","fontSize":"var(--lia-font-size-xxs)","textTransform":"UPPERCASE","letterSpacing":"0.5px","__typename":"TagsThemeSettings"},"toasts":{"borderRadius":"var(--lia-bs-border-radius)","paddingX":"12px","__typename":"ToastsThemeSettings"},"typography":{"fontFamilyBase":"Atkinson Hyperlegible","fontStyleBase":"NORMAL","fontWeightBase":"400","fontWeightLight":"300","fontWeightNormal":"400","fontWeightMd":"500","fontWeightBold":"700","letterSpacingSm":"normal","letterSpacingXs":"normal","lineHeightBase":"1.3","fontSizeBase":"15px","fontSizeXxs":"11px","fontSizeXs":"12px","fontSizeSm":"13px","fontSizeLg":"20px","fontSizeXl":"24px","smallFontSize":"14px","customFonts":[],"__typename":"TypographyThemeSettings"},"unstyledListItem":{"marginBottomSm":"5px","marginBottomMd":"10px","marginBottomLg":"15px","marginBottomXl":"20px","marginBottomXxl":"25px","__typename":"UnstyledListItemThemeSettings"},"yiq":{"light":"#ffffff","dark":"#000000","__typename":"YiqThemeSettings"},"colorLightness":{"primaryDark":0.36,"primaryLight":0.74,"primaryLighter":0.89,"primaryLightest":0.95,"infoDark":0.39,"infoLight":0.72,"infoLighter":0.85,"infoLightest":0.93,"successDark":0.24,"successLight":0.62,"successLighter":0.8,"successLightest":0.91,"warningDark":0.39,"warningLight":0.68,"warningLighter":0.84,"warningLightest":0.93,"dangerDark":0.41,"dangerLight":0.72,"dangerLighter":0.89,"dangerLightest":0.95,"__typename":"ColorLightnessThemeSettings"},"localOverride":false,"__typename":"Theme"},"localOverride":false},"CachedAsset:text:en_US-shared/client/components/common/Loading/LoadingDot-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-shared/client/components/common/Loading/LoadingDot-1743097581414","value":{"title":"Loading..."},"localOverride":false},"CachedAsset:text:en_US-components/common/EmailVerification-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/common/EmailVerification-1743097581414","value":{"email.verification.title":"Email Verification Required","email.verification.message.update.email":"To participate in the community, you must first verify your email address. The verification email was sent to {email}. To change your email, visit My Settings.","email.verification.message.resend.email":"To participate in the community, you must first verify your email address. The verification email was sent to {email}. Resend email."},"localOverride":false},"CachedAsset:text:en_US-pages/tags/TagPage-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-pages/tags/TagPage-1743097581414","value":{"tagPageTitle":"Tag:\"{tagName}\" | {communityTitle}","tagPageForNodeTitle":"Tag:\"{tagName}\" in \"{title}\" | {communityTitle}","name":"Tags Page","tag":"Tag: {tagName}"},"localOverride":false},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bi0zNC0xM2k0MzE3N0Q2NjFBRDg5NDAy\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bi0zNC0xM2k0MzE3N0Q2NjFBRDg5NDAy","mimeType":"image/png"},"Category:category:Articles":{"__typename":"Category","id":"category:Articles","entityType":"CATEGORY","displayId":"Articles","nodeType":"category","depth":1,"title":"Articles","shortTitle":"Articles","parent":{"__ref":"Category:category:top"},"categoryPolicies":{"__typename":"CategoryPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Category:category:top":{"__typename":"Category","id":"category:top","displayId":"top","nodeType":"category","depth":0,"title":"Top"},"Tkb:board:TechnicalArticles":{"__typename":"Tkb","id":"board:TechnicalArticles","entityType":"TKB","displayId":"TechnicalArticles","nodeType":"board","depth":2,"conversationStyle":"TKB","title":"Technical Articles","description":"F5 SMEs share good practice.","avatar":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bi0zNC0xM2k0MzE3N0Q2NjFBRDg5NDAy\"}"},"profileSettings":{"__typename":"ProfileSettings","language":null},"parent":{"__ref":"Category:category:Articles"},"ancestors":{"__typename":"CoreNodeConnection","edges":[{"__typename":"CoreNodeEdge","node":{"__ref":"Community:community:zihoc95639"}},{"__typename":"CoreNodeEdge","node":{"__ref":"Category:category:Articles"}}]},"userContext":{"__typename":"NodeUserContext","canAddAttachments":false,"canUpdateNode":false,"canPostMessages":false,"isSubscribed":false},"boardPolicies":{"__typename":"BoardPolicies","canPublishArticleOnCreate":{"__typename":"PolicyResult","failureReason":{"__typename":"FailureReason","message":"error.lithium.policies.forums.policy_can_publish_on_create_workflow_action.accessDenied","key":"error.lithium.policies.forums.policy_can_publish_on_create_workflow_action.accessDenied","args":[]}},"canReadNode":{"__typename":"PolicyResult","failureReason":null}},"tkbPolicies":{"__typename":"TkbPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"shortTitle":"Technical Articles","tagPolicies":{"__typename":"TagPolicies","canSubscribeTagOnNode":{"__typename":"PolicyResult","failureReason":{"__typename":"FailureReason","message":"error.lithium.policies.labels.action.corenode.subscribe_labels.allow.accessDenied","key":"error.lithium.policies.labels.action.corenode.subscribe_labels.allow.accessDenied","args":[]}},"canManageTagDashboard":{"__typename":"PolicyResult","failureReason":{"__typename":"FailureReason","message":"error.lithium.policies.labels.action.corenode.admin_labels.allow.accessDenied","key":"error.lithium.policies.labels.action.corenode.admin_labels.allow.accessDenied","args":[]}}}},"CachedAsset:quilt:f5.prod:pages/tags/TagPage:board:TechnicalArticles-1743097581415":{"__typename":"CachedAsset","id":"quilt:f5.prod:pages/tags/TagPage:board:TechnicalArticles-1743097581415","value":{"id":"TagPage","container":{"id":"Common","headerProps":{"removeComponents":["community.widget.bannerWidget"],"__typename":"QuiltContainerSectionProps"},"items":[{"id":"tag-header-widget","layout":"ONE_COLUMN","bgColor":"var(--lia-bs-white)","showBorder":"BOTTOM","sectionEditLevel":"LOCKED","columnMap":{"main":[{"id":"tags.widget.TagsHeaderWidget","__typename":"QuiltComponent"}],"__typename":"OneSectionColumns"},"__typename":"OneColumnQuiltSection"},{"id":"messages-list-for-tag-widget","layout":"ONE_COLUMN","columnMap":{"main":[{"id":"messages.widget.messageListForNodeByRecentActivityWidget","props":{"viewVariant":{"type":"inline","props":{"useUnreadCount":true,"useViewCount":true,"useAuthorLogin":true,"clampBodyLines":3,"useAvatar":true,"useBoardIcon":false,"useKudosCount":true,"usePreviewMedia":true,"useTags":false,"useNode":true,"useNodeLink":true,"useTextBody":true,"truncateBodyLength":-1,"useBody":true,"useRepliesCount":true,"useSolvedBadge":true,"timeStampType":"conversation.lastPostingActivityTime","useMessageTimeLink":true,"clampSubjectLines":2}},"panelType":"divider","useTitle":false,"hideIfEmpty":false,"pagerVariant":{"type":"loadMore"},"style":"list","showTabs":true,"tabItemMap":{"default":{"mostRecent":true,"mostRecentUserContent":false,"newest":false},"additional":{"mostKudoed":true,"mostViewed":true,"mostReplies":false,"noReplies":false,"noSolutions":false,"solutions":false}}},"__typename":"QuiltComponent"}],"__typename":"OneSectionColumns"},"__typename":"OneColumnQuiltSection"}],"__typename":"QuiltContainer"},"__typename":"Quilt"},"localOverride":false},"CachedAsset:quiltWrapper:f5.prod:Common:1742463757226":{"__typename":"CachedAsset","id":"quiltWrapper:f5.prod:Common:1742463757226","value":{"id":"Common","header":{"backgroundImageProps":{"assetName":"header.jpg","backgroundSize":"COVER","backgroundRepeat":"NO_REPEAT","backgroundPosition":"LEFT_CENTER","lastModified":"1702932449000","__typename":"BackgroundImageProps"},"backgroundColor":"transparent","items":[{"id":"custom.widget.Beta_MetaNav","props":{"widgetVisibility":"signedInOrAnonymous","useTitle":true,"useBackground":false,"title":"","lazyLoad":false},"__typename":"QuiltComponent"},{"id":"community.widget.navbarWidget","props":{"showUserName":false,"showRegisterLink":true,"style":{"boxShadow":"var(--lia-bs-box-shadow-sm)","linkFontWeight":"700","controllerHighlightColor":"hsla(30, 100%, 50%)","dropdownDividerMarginBottom":"10px","hamburgerBorderHover":"none","linkFontSize":"15px","linkBoxShadowHover":"none","backgroundOpacity":0.4,"controllerBorderRadius":"var(--lia-border-radius-50)","hamburgerBgColor":"transparent","linkTextBorderBottom":"none","hamburgerColor":"var(--lia-nav-controller-icon-color)","brandLogoHeight":"48px","linkLetterSpacing":"normal","linkBgHoverColor":"transparent","collapseMenuDividerOpacity":0.16,"paddingBottom":"10px","dropdownPaddingBottom":"15px","dropdownMenuOffset":"2px","hamburgerBgHoverColor":"transparent","borderBottom":"0","hamburgerBorder":"none","dropdownPaddingX":"10px","brandMarginRightSm":"10px","linkBoxShadow":"none","linkJustifyContent":"center","linkColor":"var(--lia-bs-primary)","collapseMenuDividerBg":"var(--lia-nav-link-color)","dropdownPaddingTop":"10px","controllerHighlightTextColor":"var(--lia-yiq-dark)","background":{"imageAssetName":"","color":"var(--lia-bs-white)","size":"COVER","repeat":"NO_REPEAT","position":"CENTER_CENTER","imageLastModified":""},"linkBorderRadius":"var(--lia-bs-border-radius-sm)","linkHoverColor":"var(--lia-bs-primary)","position":"FIXED","linkBorder":"none","linkTextBorderBottomHover":"2px solid #0C5C8D","brandMarginRight":"30px","hamburgerHoverColor":"var(--lia-nav-controller-icon-color)","linkBorderHover":"none","collapseMenuMarginLeft":"20px","linkFontStyle":"NORMAL","linkPaddingX":"10px","paddingTop":"10px","linkPaddingY":"5px","linkTextTransform":"NONE","dropdownBorderColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.08)","controllerBgHoverColor":"hsla(var(--lia-bs-black-h), var(--lia-bs-black-s), var(--lia-bs-black-l), 0.1)","linkDropdownPaddingX":"var(--lia-nav-link-px)","linkBgColor":"transparent","linkDropdownPaddingY":"9px","controllerIconColor":"#0C5C8D","dropdownDividerMarginTop":"10px","linkGap":"10px","controllerIconHoverColor":"#0C5C8D"},"links":{"sideLinks":[],"mainLinks":[{"children":[{"linkType":"INTERNAL","id":"migrated-link-1","params":{"boardId":"TechnicalForum","categoryId":"Forums"},"routeName":"ForumBoardPage"},{"linkType":"INTERNAL","id":"migrated-link-2","params":{"boardId":"WaterCooler","categoryId":"Forums"},"routeName":"ForumBoardPage"}],"linkType":"INTERNAL","id":"migrated-link-0","params":{"categoryId":"Forums"},"routeName":"CategoryPage"},{"children":[{"linkType":"INTERNAL","id":"migrated-link-4","params":{"boardId":"codeshare","categoryId":"CrowdSRC"},"routeName":"TkbBoardPage"},{"linkType":"INTERNAL","id":"migrated-link-5","params":{"boardId":"communityarticles","categoryId":"CrowdSRC"},"routeName":"TkbBoardPage"}],"linkType":"INTERNAL","id":"migrated-link-3","params":{"categoryId":"CrowdSRC"},"routeName":"CategoryPage"},{"children":[{"linkType":"INTERNAL","id":"migrated-link-7","params":{"boardId":"TechnicalArticles","categoryId":"Articles"},"routeName":"TkbBoardPage"},{"linkType":"INTERNAL","id":"article-series","params":{"boardId":"article-series","categoryId":"Articles"},"routeName":"TkbBoardPage"},{"linkType":"INTERNAL","id":"security-insights","params":{"boardId":"security-insights","categoryId":"Articles"},"routeName":"TkbBoardPage"},{"linkType":"INTERNAL","id":"migrated-link-8","params":{"boardId":"DevCentralNews","categoryId":"Articles"},"routeName":"TkbBoardPage"}],"linkType":"INTERNAL","id":"migrated-link-6","params":{"categoryId":"Articles"},"routeName":"CategoryPage"},{"children":[{"linkType":"INTERNAL","id":"migrated-link-10","params":{"categoryId":"CommunityGroups"},"routeName":"CategoryPage"},{"linkType":"INTERNAL","id":"migrated-link-11","params":{"categoryId":"F5-Groups"},"routeName":"CategoryPage"}],"linkType":"INTERNAL","id":"migrated-link-9","params":{"categoryId":"GroupsCategory"},"routeName":"CategoryPage"},{"children":[],"linkType":"INTERNAL","id":"migrated-link-12","params":{"boardId":"Events","categoryId":"top"},"routeName":"EventBoardPage"},{"children":[],"linkType":"INTERNAL","id":"migrated-link-13","params":{"boardId":"Suggestions","categoryId":"top"},"routeName":"IdeaBoardPage"},{"children":[],"linkType":"EXTERNAL","id":"Common-external-link","url":"https://community.f5.com/c/how-do-i","target":"SELF"}]},"className":"QuiltComponent_lia-component-edit-mode__lQ9Z6","showSearchIcon":false},"__typename":"QuiltComponent"},{"id":"community.widget.bannerWidget","props":{"backgroundColor":"transparent","visualEffects":{"showBottomBorder":false},"backgroundImageProps":{"backgroundSize":"COVER","backgroundPosition":"CENTER_CENTER","backgroundRepeat":"NO_REPEAT"},"fontColor":"#222222"},"__typename":"QuiltComponent"},{"id":"community.widget.breadcrumbWidget","props":{"backgroundColor":"var(--lia-bs-primary)","linkHighlightColor":"#FFFFFF","visualEffects":{"showBottomBorder":false},"backgroundOpacity":60,"linkTextColor":"#FFFFFF"},"__typename":"QuiltComponent"}],"__typename":"QuiltWrapperSection"},"footer":{"backgroundImageProps":{"assetName":null,"backgroundSize":"COVER","backgroundRepeat":"NO_REPEAT","backgroundPosition":"CENTER_CENTER","lastModified":null,"__typename":"BackgroundImageProps"},"backgroundColor":"var(--lia-bs-body-color)","items":[{"id":"custom.widget.Beta_Footer","props":{"widgetVisibility":"signedInOrAnonymous","useTitle":true,"useBackground":false,"title":"","lazyLoad":false},"__typename":"QuiltComponent"},{"id":"custom.widget.Tag_Manager_Helper","props":{"widgetVisibility":"signedInOrAnonymous","useTitle":true,"useBackground":false,"title":"","lazyLoad":false},"__typename":"QuiltComponent"},{"id":"custom.widget.Consent_Blackbar","props":{"widgetVisibility":"signedInOrAnonymous","useTitle":true,"useBackground":false,"title":"","lazyLoad":false},"__typename":"QuiltComponent"}],"__typename":"QuiltWrapperSection"},"__typename":"QuiltWrapper","localOverride":false},"localOverride":false},"CachedAsset:text:en_US-components/common/ActionFeedback-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/common/ActionFeedback-1743097581414","value":{"joinedGroupHub.title":"Welcome","joinedGroupHub.message":"You are now a member of this group and are subscribed to updates.","groupHubInviteNotFound.title":"Invitation Not Found","groupHubInviteNotFound.message":"Sorry, we could not find your invitation to the group. The owner may have canceled the invite.","groupHubNotFound.title":"Group Not Found","groupHubNotFound.message":"The grouphub you tried to join does not exist. It may have been deleted.","existingGroupHubMember.title":"Already Joined","existingGroupHubMember.message":"You are already a member of this group.","accountLocked.title":"Account Locked","accountLocked.message":"Your account has been locked due to multiple failed attempts. Try again in {lockoutTime} minutes.","editedGroupHub.title":"Changes Saved","editedGroupHub.message":"Your group has been updated.","leftGroupHub.title":"Goodbye","leftGroupHub.message":"You are no longer a member of this group and will not receive future updates.","deletedGroupHub.title":"Deleted","deletedGroupHub.message":"The group has been deleted.","groupHubCreated.title":"Group Created","groupHubCreated.message":"{groupHubName} is ready to use","accountClosed.title":"Account Closed","accountClosed.message":"The account has been closed and you will now be redirected to the homepage","resetTokenExpired.title":"Reset Password Link has Expired","resetTokenExpired.message":"Try resetting your password again","invalidUrl.title":"Invalid URL","invalidUrl.message":"The URL you're using is not recognized. Verify your URL and try again.","accountClosedForUser.title":"Account Closed","accountClosedForUser.message":"{userName}'s account is closed","inviteTokenInvalid.title":"Invitation Invalid","inviteTokenInvalid.message":"Your invitation to the community has been canceled or expired.","inviteTokenError.title":"Invitation Verification Failed","inviteTokenError.message":"The url you are utilizing is not recognized. Verify your URL and try again","pageNotFound.title":"Access Denied","pageNotFound.message":"You do not have access to this area of the community or it doesn't exist","eventAttending.title":"Responded as Attending","eventAttending.message":"You'll be notified when there's new activity and reminded as the event approaches","eventInterested.title":"Responded as Interested","eventInterested.message":"You'll be notified when there's new activity and reminded as the event approaches","eventNotFound.title":"Event Not Found","eventNotFound.message":"The event you tried to respond to does not exist.","redirectToRelatedPage.title":"Showing Related Content","redirectToRelatedPageForBaseUsers.title":"Showing Related Content","redirectToRelatedPageForBaseUsers.message":"The content you are trying to access is archived","redirectToRelatedPage.message":"The content you are trying to access is archived","relatedUrl.archivalLink.flyoutMessage":"The content you are trying to access is archived View Archived Content"},"localOverride":false},"CachedAsset:component:custom.widget.Beta_MetaNav-en-1742463858459":{"__typename":"CachedAsset","id":"component:custom.widget.Beta_MetaNav-en-1742463858459","value":{"component":{"id":"custom.widget.Beta_MetaNav","template":{"id":"Beta_MetaNav","markupLanguage":"HANDLEBARS","style":null,"texts":null,"defaults":{"config":{"applicablePages":[],"description":"MetaNav menu at the top of every page.","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"components":[{"id":"custom.widget.Beta_MetaNav","form":null,"config":null,"props":[],"__typename":"Component"}],"grouping":"CUSTOM","__typename":"ComponentTemplate"},"properties":{"config":{"applicablePages":[],"description":"MetaNav menu at the top of every page.","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"form":null,"__typename":"Component","localOverride":false},"globalCss":null,"form":null},"localOverride":false},"CachedAsset:component:custom.widget.Beta_Footer-en-1742463858459":{"__typename":"CachedAsset","id":"component:custom.widget.Beta_Footer-en-1742463858459","value":{"component":{"id":"custom.widget.Beta_Footer","template":{"id":"Beta_Footer","markupLanguage":"HANDLEBARS","style":null,"texts":null,"defaults":{"config":{"applicablePages":[],"description":"DevCentral´s custom footer.","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"components":[{"id":"custom.widget.Beta_Footer","form":null,"config":null,"props":[],"__typename":"Component"}],"grouping":"CUSTOM","__typename":"ComponentTemplate"},"properties":{"config":{"applicablePages":[],"description":"DevCentral´s custom footer.","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"form":null,"__typename":"Component","localOverride":false},"globalCss":null,"form":null},"localOverride":false},"CachedAsset:component:custom.widget.Tag_Manager_Helper-en-1742463858459":{"__typename":"CachedAsset","id":"component:custom.widget.Tag_Manager_Helper-en-1742463858459","value":{"component":{"id":"custom.widget.Tag_Manager_Helper","template":{"id":"Tag_Manager_Helper","markupLanguage":"HANDLEBARS","style":null,"texts":null,"defaults":{"config":{"applicablePages":[],"description":"Helper widget to inject Tag Manager scripts into head element","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"components":[{"id":"custom.widget.Tag_Manager_Helper","form":null,"config":null,"props":[],"__typename":"Component"}],"grouping":"CUSTOM","__typename":"ComponentTemplate"},"properties":{"config":{"applicablePages":[],"description":"Helper widget to inject Tag Manager scripts into head element","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"form":null,"__typename":"Component","localOverride":false},"globalCss":null,"form":null},"localOverride":false},"CachedAsset:component:custom.widget.Consent_Blackbar-en-1742463858459":{"__typename":"CachedAsset","id":"component:custom.widget.Consent_Blackbar-en-1742463858459","value":{"component":{"id":"custom.widget.Consent_Blackbar","template":{"id":"Consent_Blackbar","markupLanguage":"HTML","style":null,"texts":null,"defaults":{"config":{"applicablePages":[],"description":"","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"components":[{"id":"custom.widget.Consent_Blackbar","form":null,"config":null,"props":[],"__typename":"Component"}],"grouping":"TEXTHTML","__typename":"ComponentTemplate"},"properties":{"config":{"applicablePages":[],"description":"","fetchedContent":null,"__typename":"ComponentConfiguration"},"props":[],"__typename":"ComponentProperties"},"form":null,"__typename":"Component","localOverride":false},"globalCss":null,"form":null},"localOverride":false},"CachedAsset:text:en_US-components/community/Breadcrumb-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/community/Breadcrumb-1743097581414","value":{"navLabel":"Breadcrumbs","dropdown":"Additional parent page navigation"},"localOverride":false},"CachedAsset:text:en_US-components/tags/TagsHeaderWidget-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/tags/TagsHeaderWidget-1743097581414","value":{"tag":"{tagName}","topicsCount":"{count} {count, plural, one {Topic} other {Topics}}"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageListForNodeByRecentActivityWidget-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageListForNodeByRecentActivityWidget-1743097581414","value":{"title@userScope:other":"Recent Content","title@userScope:self":"Contributions","title@board:FORUM@userScope:other":"Recent Discussions","title@board:BLOG@userScope:other":"Recent Blogs","emptyDescription":"No content to show","MessageListForNodeByRecentActivityWidgetEditor.nodeScope.label":"Scope","title@instance:1706288370055":"Content Feed","title@instance:1743095186784":"Most Recent Updates","title@instance:1704317906837":"Content Feed","title@instance:1743095018194":"Most Recent Updates","title@instance:1702668293472":"Community Feed","title@instance:1743095117047":"Most Recent Updates","title@instance:1704319314827":"Blog Feed","title@instance:1743095235555":"Most Recent Updates","title@instance:1704320290851":"My Contributions","title@instance:1703720491809":"Forum Feed","title@instance:1743095311723":"Most Recent Updates","title@instance:1703028709746":"Group Content Feed","title@instance:VTsglH":"Content Feed"},"localOverride":false},"Category:category:Forums":{"__typename":"Category","id":"category:Forums","categoryPolicies":{"__typename":"CategoryPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Forum:board:TechnicalForum":{"__typename":"Forum","id":"board:TechnicalForum","forumPolicies":{"__typename":"ForumPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Forum:board:WaterCooler":{"__typename":"Forum","id":"board:WaterCooler","forumPolicies":{"__typename":"ForumPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Tkb:board:DevCentralNews":{"__typename":"Tkb","id":"board:DevCentralNews","tkbPolicies":{"__typename":"TkbPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Category:category:GroupsCategory":{"__typename":"Category","id":"category:GroupsCategory","categoryPolicies":{"__typename":"CategoryPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Category:category:F5-Groups":{"__typename":"Category","id":"category:F5-Groups","categoryPolicies":{"__typename":"CategoryPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Category:category:CommunityGroups":{"__typename":"Category","id":"category:CommunityGroups","categoryPolicies":{"__typename":"CategoryPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Occasion:board:Events":{"__typename":"Occasion","id":"board:Events","boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"occasionPolicies":{"__typename":"OccasionPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Idea:board:Suggestions":{"__typename":"Idea","id":"board:Suggestions","boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"ideaPolicies":{"__typename":"IdeaPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Category:category:CrowdSRC":{"__typename":"Category","id":"category:CrowdSRC","categoryPolicies":{"__typename":"CategoryPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Tkb:board:codeshare":{"__typename":"Tkb","id":"board:codeshare","tkbPolicies":{"__typename":"TkbPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Tkb:board:communityarticles":{"__typename":"Tkb","id":"board:communityarticles","tkbPolicies":{"__typename":"TkbPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Tkb:board:security-insights":{"__typename":"Tkb","id":"board:security-insights","tkbPolicies":{"__typename":"TkbPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Tkb:board:article-series":{"__typename":"Tkb","id":"board:article-series","tkbPolicies":{"__typename":"TkbPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}},"boardPolicies":{"__typename":"BoardPolicies","canReadNode":{"__typename":"PolicyResult","failureReason":null}}},"Conversation:conversation:339884":{"__typename":"Conversation","id":"conversation:339884","topic":{"__typename":"TkbTopicMessage","uid":339884},"lastPostingActivityTime":"2025-02-24T19:51:18.469-08:00","solved":false},"User:user:421692":{"__typename":"User","uid":421692,"login":"Jennifer_Yeung","registrationData":{"__typename":"RegistrationData","status":null},"deleted":false,"avatar":{"__typename":"UserAvatar","url":"https://community.f5.com/t5/s/zihoc95639/images/dS00MjE2OTItMjM4NjlpMkQ2RjRFMUZDNDMxN0IxRA"},"id":"user:421692"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtWVU1amF6?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtWVU1amF6?revision=25","title":"pic1.png","associationType":"BODY","width":2782,"height":1392,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtYXEzVmNL?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtYXEzVmNL?revision=25","title":"new-pic2.png","associationType":"BODY","width":2932,"height":1472,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtSmJNZ3Ry?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtSmJNZ3Ry?revision=25","title":"pic3.png","associationType":"BODY","width":1343,"height":751,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtSVJiZ3dK?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtSVJiZ3dK?revision=25","title":"pic4.png","associationType":"BODY","width":1098,"height":661,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtTHA2SDc0?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtTHA2SDc0?revision=25","title":"pic5.png","associationType":"BODY","width":1032,"height":265,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdTFySzhB?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdTFySzhB?revision=25","title":"pic6.png","associationType":"BODY","width":1289,"height":380,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtN1dLVFhZ?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtN1dLVFhZ?revision=25","title":"pic7.png","associationType":"BODY","width":1315,"height":736,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtS3RzbXJl?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtS3RzbXJl?revision=25","title":"pic8.png","associationType":"BODY","width":1211,"height":744,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtc0l2OTBl?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtc0l2OTBl?revision=25","title":"new-pic10.png","associationType":"BODY","width":2306,"height":1119,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtNGlPcnZQ?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtNGlPcnZQ?revision=25","title":"new-pic11.png","associationType":"BODY","width":1917,"height":1060,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtbUtVeVYz?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtbUtVeVYz?revision=25","title":"new-pic12.png","associationType":"BODY","width":2460,"height":1262,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtRVBsOVg3?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtRVBsOVg3?revision=25","title":"pic13.png","associationType":"BODY","width":2860,"height":1404,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdUJkT0V0?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdUJkT0V0?revision=25","title":"pic14.png","associationType":"BODY","width":2534,"height":416,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtd2YxMEQ3?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtd2YxMEQ3?revision=25","title":"pic15.png","associationType":"BODY","width":3178,"height":1176,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZDF2R25C?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZDF2R25C?revision=25","title":"pic16-new.png","associationType":"BODY","width":3074,"height":574,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdGV0Sk53?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdGV0Sk53?revision=25","title":"pic17-new.png","associationType":"BODY","width":1920,"height":892,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZjdZOEU3?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZjdZOEU3?revision=25","title":"pic18.png","associationType":"BODY","width":2762,"height":1580,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtV2xodXRO?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtV2xodXRO?revision=25","title":"pic17.png","associationType":"BODY","width":998,"height":720,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtS2ZzOW5O?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtS2ZzOW5O?revision=25","title":"pic18.png","associationType":"BODY","width":1730,"height":249,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQta2xHUGlO?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQta2xHUGlO?revision=25","title":"pic19.png","associationType":"BODY","width":1018,"height":851,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtbmp0UFR4?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtbmp0UFR4?revision=25","title":"pic22.png","associationType":"BODY","width":2754,"height":1646,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZU05eG1o?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZU05eG1o?revision=25","title":"pic21-a.png","associationType":"BODY","width":1568,"height":607,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtWE1saWtw?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtWE1saWtw?revision=25","title":"pic21.png","associationType":"BODY","width":1197,"height":417,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtTllHYk9Z?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtTllHYk9Z?revision=25","title":"pic24.png","associationType":"BODY","width":2968,"height":466,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtclVkT3E3?revision=25\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtclVkT3E3?revision=25","title":"pic25.png","associationType":"BODY","width":1814,"height":366,"altText":""},"TkbTopicMessage:message:339884":{"__typename":"TkbTopicMessage","subject":"Secure AI RAG using F5 Distributed Cloud in Red Hat OpenShift AI and NetApp ONTAP Environment","conversation":{"__ref":"Conversation:conversation:339884"},"id":"message:339884","revisionNum":25,"uid":339884,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:421692"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":"","introduction":"","metrics":{"__typename":"MessageMetrics","views":295},"postTime":"2025-02-24T19:51:18.469-08:00","lastPublishTime":"2025-02-24T19:51:18.469-08:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" Introduction \n Retrieval Augmented Generation (RAG) is a powerful technique that allows Large Language Models (LLMs) to access information beyond their training data. The “R” in RAG refers to the data retrieval process, where the system retrieves relevant information from an external knowledge base based on the input query. Next, the “A” in RAG represents the augmentation of context enrichment, as the system combines the retrieved relevant information and the input query to create a more comprehensive prompt for the LLM. Lastly, the “G” in RAG stands for response generation, where the LLM generates a response with a more contextually accurate output based on the augmented prompt as a result.  \n RAG is becoming increasingly popular in enterprise AI applications due to its ability to provide more accurate and contextually relevant responses to a wide range of queries. However, deploying RAG can introduce complexity due to its components being located in different environments. For instance, the datastore or corpus, which is a collection of data, is typically on-premise for enhanced control over data access and management due to data security, governance, and compliance with regulations within the enterprise. Meanwhile, inference services are often deployed in the cloud for their scalability and cost-effectiveness.   \n In this article, we will discuss how F5 Distributed Cloud can simplify the complexity and securely connect all RAG components seamlessly for enterprise RAG-enabled AI applications deployments. Specifically, we will focus on Network Connect, App Connect, and Web App & API Protection. We will demonstrate how these F5 Distributed Cloud features can be leveraged to secure RAG in collaboration with Red Hat OpenShift AI and NetApp ONTAP. \n Example Topology \n \n   \n F5 Distributed Cloud Network Connect  \n F5 Distributed Cloud Network Connect enables seamless and secure network connectivity across hybrid and multicloud environments. By deploying F5 Distributed Cloud Customer Edge (CE) at site, it allows us to easily establish encrypted site-to-site connectivity across on-premises, multi-cloud, and edge environment.     \n Jensen Huang, CEO of NVIDIA, has said that \"Nearly half of the files in the world are stored on-prem on NetApp.”.  In our example, enterprise data stores are deployed on NetApp ONTAP in a data center in Seattle managed by organization B (Segment-B: s-gorman-production-segment), while RAG services, including embedding Large Language Model (LLM) and vector database, is deployed on-premise on a Red Hat OpenShift cluster in a data center in California managed by Organization A (Segment-A: jy-ocp). By leveraging F5 Distributed Cloud Network Connect, we can quickly and easily establish a secure connection for seamless and efficient data transfer from the enterprise data stores to RAG services between these two segments only: \n \n F5 Distributed Cloud CE can be deployed as a virtual machine (VM) or as a pod on a Red Hat OpenShift cluster. In California, we deploy the CE as a VM using Red Hat OpenShift Virtualization —  click here to find out more on Deploying F5 Distributed Cloud Customer Edge in Red Hat OpenShift Virtualization: \n \n Segment-A: jy-ocp on CE in California and Segment-B: s-gorman-production-segment on CE in Seattle: \n \n Simply and securely connect Segment-A: jy-ocp and Segment-B: s-gorman-production-segment only,  using Segment Connector: \n \n NetApp ONTAP in Seattle has a LUN named “tbd-RAG”, which serves as the enterprise data store in our demo setup and contains a collection of data. After these two data centers are connected using F5 XC Network Connect, a secure encrypted end-to-end connection is established between them. In our example, “test-ai-tbd” is in the data center in California where it hosts the RAG services, including embedding Large Language Model (LLM) and vector database, and it can now successfully connect to the enterprise data stores on NetApp ONTAP in the data center in Seattle: \n \n F5 Distributed Cloud App Connect  \n F5 Distributed Cloud App Connect securely connects and delivers distributed applications and services across hybrid and multicloud environments. By utilizing F5 Distributed Cloud App Connect, we can direct the inference traffic through F5 Distributed Cloud's security layers to safeguard our inference endpoints.  \n \n Red Hat OpenShift on Amazon Web Services (ROSA) is a fully managed service that allows users to develop, run, and scale applications in a native AWS environment. We can host our inference service on ROSA so that we can leverage the scalability, cost-effectiveness, and numerous benefits of AWS’s managed infrastructure services. For instance, we can host our inference service on ROSA by deploying Ollama with multiple AI/ML models: \n \n Or, we can enable Model Serving on Red Hat OpenShift AI (RHOAI). Red Hat OpenShift AI (RHOAI) is a flexible and scalable AI/ML platform builds on the capabilities of Red Hat OpenShift that facilitates collaboration among data scientists, engineers, and app developers. This platform allows them to serve, build, train, deploy, test, and monitor AI/ML models and applications either on-premise or in the cloud, fostering efficient innovation within organizations. In our example, we use Red Hat OpenShift AI (RHOAI) Model Serving on ROSA for our inference service: \n \n Once inference service is deployed on ROSA, we can utilize F5 Distributed Cloud to secure our inference endpoint by steering the inference traffic through F5 Distributed Cloud's security layers, which offers an extensive suite of features designed specifically for the security of modern AI/ML inference endpoints. This setup would allow us to scrutinize requests, implement policies for detected threats, and protect sensitive datasets before they reach the inferencing service hosted within ROSA. In our example, we setup a F5 Distributed Cloud HTTP Load Balancer (rhoai-llm-serving.f5-demo.com), and we advertise it to the CE in the datacenter in California only: \n \n We now reach our Red Hat OpenShift AI (RHOAI) inference endpoint through F5 Distributed Cloud: \n \n F5 Distributed Cloud Web App & API Protection  \n F5 Distributed Cloud Web App & API Protection provides comprehensive sets of security features, and uniform observability and policy enforcement to protect apps and APIs across hybrid and multicloud environments. We utilize F5 Distributed Cloud App Connect to steer the inference traffic through F5 Distributed Cloud to secure our inference endpoint. In our example, we protect our Red Hat OpenShift AI (RHOAI) inference endpoint by rate-limiting the access, so that we can ensure no single client would exhaust the inference service: \n \n A \"Too Many Requests\" is received in the response when a single client repeatedly requests access to the inference service at a rate higher than the configured threshold: \n \n This is just one of the many security features to protect our inference service. Click here to find out more on Securing Model Serving in Red Hat OpenShift AI (on ROSA) with F5 Distributed Cloud API Security.  \n Demonstration   \n In a real-world scenario, the front-end application could be hosted on the cloud, or hosted at the edge, or served through F5 Distributed Cloud, offering flexible alternatives for efficient application delivery based on user preferences and specific needs. To illustrate how all the discussed components work seamlessly together, we simplify our example by deploying Open WebUI as the front-end application on the Red Hat OpenShift cluster in the data center in California, which includes RAG services. While a DPU or GPU could be used for improved performance, our setup utilizes a CPU for inferencing tasks.  \n We connect our app to our enterprise data stores deployed on NetApp ONTAP in the data center in Seattle using F5 Distributed Cloud Network Connect, where we have a copy of \"Chapter 1. About the Migration Toolkit for Virtualization\" from Red Hat. These documents are processed and saved to the Vector DB: \n \n Our embedding Large Language Model (LLM) is Sentence-Transformers/all-MiniLM-L6-v2, and here is our RAG template: \n \n Instead of connecting to the inference endpoint on Red Hat OpenShift AI (RHOAI) on ROSA directly, we connect to the F5 Distributed Cloud HTTP Load Balancer (rhoai-llm-serving.f5-demo.com) from F5 Distributed Cloud App Connect: \n \n Previously, we asked, \"What is MTV?“ and we never received a response related to Red Hat Migration Toolkit for Virtualization: \n \n Now, let's try asking the same question again with RAG services enabled:  \n \n We finally received the response we had anticipated.  \n Next, we use F5 Distributed Cloud Web App & API Protection to safeguard our Red Hat OpenShift AI (RHOAI) inference endpoint on ROSA by rate-limiting the access, thus preventing a single client from exhausting the inference service:  \n \n As expected, we received \"Too Many Requests\" in the response on our app upon requesting the inference service at a rate greater than the set threshold: \n \n With F5 Distributed Cloud's real-time observability and security analytics from the F5 Distributed Console, we can proactively monitor for potential threats. For example, if necessary, we can block a client from accessing the inference service by adding it to the Blocked Clients List: \n \n As expected, this specific client is now unable to access the inference service: \n \n Summary \n Deploying and securing RAG for enterprise RAG-enabled AI applications in a multi-vendor, hybrid, and multi-cloud environment can present complex challenges. In collaboration with Red Hat OpenShift AI (RHOAI) and NetApp ONTAP, F5 Distributed Cloud provides an effortless solution that secures RAG components seamlessly for enterprise RAG-enabled AI applications. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"10043","kudosSumWeight":1,"repliesCount":0,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtWVU1amF6?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtYXEzVmNL?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDM","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtSmJNZ3Ry?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDQ","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtSVJiZ3dK?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDU","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtTHA2SDc0?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDY","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdTFySzhB?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDc","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtN1dLVFhZ?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDg","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtS3RzbXJl?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDk","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtc0l2OTBl?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEw","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtNGlPcnZQ?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEx","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtbUtVeVYz?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEy","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtRVBsOVg3?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEz","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdUJkT0V0?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE0","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtd2YxMEQ3?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE1","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZDF2R25C?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE2","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtdGV0Sk53?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE3","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZjdZOEU3?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE4","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtV2xodXRO?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE5","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtS2ZzOW5O?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDIw","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQta2xHUGlO?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDIx","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtbmp0UFR4?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDIy","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtZU05eG1o?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDIz","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtWE1saWtw?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI0","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtTllHYk9Z?revision=25\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI1","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk4ODQtclVkT3E3?revision=25\"}"}}],"totalCount":28,"pageInfo":{"__typename":"PageInfo","hasNextPage":true,"endCursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI1","hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:339207":{"__typename":"Conversation","id":"conversation:339207","topic":{"__typename":"TkbTopicMessage","uid":339207},"lastPostingActivityTime":"2025-02-13T08:33:04.946-08:00","solved":false},"User:user:171064":{"__typename":"User","uid":171064,"login":"Foo-Bang_Chan","registrationData":{"__typename":"RegistrationData","status":null},"deleted":false,"avatar":{"__typename":"UserAvatar","url":"https://community.f5.com/t5/s/zihoc95639/images/dS0xNzEwNjQtSzhEcmtx?image-coordinates=62%2C0%2C1665%2C1603"},"id":"user:171064"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzkyMDctMlNBbnVx?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzkyMDctMlNBbnVx?revision=7","title":"AI Security Control.png","associationType":"BODY","width":3100,"height":1606,"altText":""},"TkbTopicMessage:message:339207":{"__typename":"TkbTopicMessage","subject":"F5 AI Gateway - Secure, Deliver and Optimize GenAI Apps","conversation":{"__ref":"Conversation:conversation:339207"},"id":"message:339207","revisionNum":7,"uid":339207,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:171064"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":" AI has revolutionized industries by automating tasks, enabling data-driven decisions, and enhancing efficiency and innovation. While it offers businesses a competitive edge by streamlining operations and improving customer experiences, it also introduces risks such as security vulnerabilities, data breaches, and cost challenges. Businesses must adopt robust cybersecurity measures and carefully manage AI investments to balance benefits with risks. F5 provides comprehensive controls to protect AI and IT infrastructures, ensuring sustainable growth in an AI-driven world. Welcome to F5 AI Gateway - a runtime security and traffic governance solution ","introduction":"","metrics":{"__typename":"MessageMetrics","views":705},"postTime":"2025-01-27T05:00:00.040-08:00","lastPublishTime":"2025-01-27T05:00:00.040-08:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" Introduction \n Artificial Intelligent (AI) has transformed the way we live and work, revolutionising industries and enhancing everyday life. By automating repetitive tasks and enabling data-driven decision-making, AI has unlocked unprecedented levels of innovation, efficiency, and productivity. For businesses, AI offers a significant competitive edge, streamlining operations, enhancing customer experiences, and driving growth. However, with great opportunities comes great risks. AI introduces security vulnerabilities, including potential misuse, data breaches, and expanded attack surfaces. It is essential for businesses to recognize these challenges and adopt robust cybersecurity measures to mitigate risks. Moreover, AI is an investment-intensive technology, and its return on investment remains difficult to quantify (at least as of this writing). Without careful oversight and optimization, AI budgets can quickly spiral out of control. Embracing AI responsibly—balancing its benefits with its risks—ensures sustainable growth and resilience in an increasingly AI-driven world. \n At F5, we offer a wide range of controls to safeguard your AI infrastructure, applicable not only to AI systems but to all IT environments.  \n \n In this article and video demo, we will focus on the AI Gateway (AI GW), which provides runtime security and traffic governance. It’s worth noting that other controls, such as securing APIs—the backbone of any AI service—are equally critical.  \n The F5 AI Gateway is designed to address specific challenges, such as those outlined in the OWASP Top 10 for LLM Applications. It features an extensible architecture with purpose-built processors and plugins, enhanced by a Software Development Kit (SDK) for custom use cases. \n Here are those demo videos on F5 AI Gateways. \n   \n Traffic Management and Optimisation of LLM \n AI Gateway function is more than just a security control tools. It play an important roles on cost optimization and traffic management. This video to demonstrate the value and capabilities of F5 AI Gateway on traffic management and optimization of LLM traffic to optimize resources, reduces costs, enhances user experience, and safeguards data privacy. \n \n   \n Securing LLM Applications \n Demonstrate F5 AI GW capabilities to address some of the key concern raised in OWASP TOP 10 for LLM apps by leveraging native in-build AI processor. \n \n   \n Specific individual Demo Video. \n Please do note that the following individual specific demo video is an excerpt of the overall Traffic management, Optimization and Security demo. \n   \n Identity Aware Model Routing \n Demonstrate the capability of F5 AI Gateway to dynamically apply and steer LLM traffic policy according to user identity based on JSON Web Token (JWT) or header. \n \n   \n Language and Code Detection Routing \n Demonstrate the capability of F5 AI Gateway to detect type of language input and steer according to the defined language model for better user experience and cost optimization. It also demonstrate code detection and steer the traffic to a local self-hosted model to prevent any unintentional code leakage to a SaaS-Managed model. \n \n   \n System Prompt Enforcement (Guardrail) \n Showcase the system prompt capabilities of the F5 AI Gateway (AI GW) to ensure conversations remain contextually relevant. The AI Gateway enforces strict guardrails to prevent users from deviating from the intended purpose of the chatbot. This helps maintain the integrity of the system while preventing the misuse of AI resources \n \n   \n OWASP Top 10 LLM01 - Prompt Injection \n Showcase Prompt Injection processor to address few key concern raised in OWASP TOP 10 for LLM apps especially on Prompt Injection concern. (LLM01) \n \n   \n OWASP Top 10 LLM02 - Sensitive Information Disclosure (PII) \n Showcase PII Redactor processor to address key concern raise on sensitive information disclosure raise in OWASP TOP 10 for LLM apps (LLM02) \n \n   \n OWASP Top 10 LLM07 - System Prompt Leakage \n Showcase System Prompt processor to address key concern raise on system prompt leakage raise in OWASP TOP 10 for LLM apps (LLM07) \n \n Please reach out to F5 representative for further deep-dive information. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"4286","kudosSumWeight":5,"repliesCount":1,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzkyMDctMlNBbnVx?revision=7\"}"}}],"totalCount":1,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:339571":{"__typename":"Conversation","id":"conversation:339571","topic":{"__typename":"TkbTopicMessage","uid":339571},"lastPostingActivityTime":"2025-02-13T05:00:00.033-08:00","solved":false},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk1NzEtWkdDSmFx?revision=5\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk1NzEtWkdDSmFx?revision=5","title":"7-security-control.png","associationType":"BODY","width":3228,"height":1642,"altText":""},"TkbTopicMessage:message:339571":{"__typename":"TkbTopicMessage","subject":"Agentic RAG - Securing GenAI with F5 Distributed Cloud Services","conversation":{"__ref":"Conversation:conversation:339571"},"id":"message:339571","revisionNum":5,"uid":339571,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:171064"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":" Agentic RAG (Retrieval-Augmented Generation) enhances the capabilities of a GenAI chatbot by integrating dynamic knowledge retrieval into its conversational abilities, making it more context-aware and accurate. In this demo, I will focus on security aspect of the solution. This demonstration will highlight the various security measures implemented and enforced in our AI reference architecture for this Agentic RAG. F5 is a trusted leader in security, with a track record of delivering robust solutions for securing applications and networks. Recognized by many independent evaluations as a Leader in Web Application and API Security from IDC, SC Award, TrustRadius, EMA, and many more, F5 exemplifies excellence and innovation. These endorsements affirm F5’s expertise, reassuring organizations that their digital assets are protected by a capable, reputable partner that keeps pace with evolving security needs. ","introduction":"","metrics":{"__typename":"MessageMetrics","views":193},"postTime":"2025-02-13T05:00:00.033-08:00","lastPublishTime":"2025-02-13T05:00:00.033-08:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" In my previous article—Enhance your GenAI chatbot with the power of Agentic RAG and F5 platform, I discussed enhancing your Generative AI (GenAI) chatbot using the power of Agentic Retrieval-Augmented Generation (RAG) and F5 platform. That article provided a high-level overview of how Agent-Based RAG (or Agentic RAG) operates and detailed the pipeline that powers it. In this follow-up article, I will demonstrate how to secure an Agentic RAG, building on the foundational concepts introduced earlier. \n Securing Agentic AI—or GenAI in general—requires implementing multiple layers of security controls. It is important to note that GenAI applications are a modern application, leveraging contemporary application frameworks and methodologies (in contrast to traditional monolithic applications). These applications are typically distributed, often running on containerized platforms such as Kubernetes. Due to their distributed, microservices-based architecture, GenAI applications rely heavily on APIs, which form the backbone of GenAI services. \n To ensure comprehensive security, traditional controls such as Web Application and API Protection (WAAP), bot management, advanced API security measures, and adherence to security best practices remain essential. However, to address the unique challenges of GenAI applications, purpose-built solutions like AI Gateways, which focus on AI runtime security and traffic governance, are crucial \n \n In this article, we will explore six key security controls designed to secure AI services. Securing AI services with AI Gateway has been shown in this article - F5 AI Gateway - Secure, Deliver and Optimize GenAI Apps \n Securing GenAI with F5 Distributed Cloud ServicesDEMO \n Please feel reach out to F5 if you need any further deep-dive discussion. \n ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"1805","kudosSumWeight":1,"repliesCount":0,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzk1NzEtWkdDSmFx?revision=5\"}"}}],"totalCount":1,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:338353":{"__typename":"Conversation","id":"conversation:338353","topic":{"__typename":"TkbTopicMessage","uid":338353},"lastPostingActivityTime":"2025-01-02T16:09:26.863-08:00","solved":false},"User:user:326558":{"__typename":"User","uid":326558,"login":"Eric_Ji","registrationData":{"__typename":"RegistrationData","status":null},"deleted":false,"avatar":{"__typename":"UserAvatar","url":"https://community.f5.com/t5/s/zihoc95639/images/dS0zMjY1NTgtaDBOSjVu?image-coordinates=0%2C0%2C1110%2C1110"},"id":"user:326558"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtTXJvZ2o4?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtTXJvZ2o4?revision=60","title":"image.png","associationType":"BODY","width":2325,"height":1011,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtOE9BVFdX?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtOE9BVFdX?revision=60","title":"image.png","associationType":"BODY","width":2412,"height":1386,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtWDZYT0JG?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtWDZYT0JG?revision=60","title":"image.png","associationType":"BODY","width":2013,"height":1210,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtZGV6OEJ4?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtZGV6OEJ4?revision=60","title":"image.png","associationType":"BODY","width":1722,"height":1102,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtd0t4N0FC?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtd0t4N0FC?revision=60","title":"image.png","associationType":"BODY","width":2564,"height":1630,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtdGZMc1lQ?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtdGZMc1lQ?revision=60","title":"image.png","associationType":"BODY","width":1726,"height":1134,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtb3lGSkxH?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtb3lGSkxH?revision=60","title":"image.png","associationType":"BODY","width":857,"height":965,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMteHpyb0kx?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMteHpyb0kx?revision=60","title":"image.png","associationType":"BODY","width":1379,"height":620,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtYUVPSVJS?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtYUVPSVJS?revision=60","title":"image (3).png","associationType":"BODY","width":1348,"height":636,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtY1NqeDJj?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtY1NqeDJj?revision=60","title":"image (4).png","associationType":"BODY","width":540,"height":435,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtTjAxZG1E?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtTjAxZG1E?revision=60","title":"image.png","associationType":"BODY","width":2004,"height":1054,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMteWRUcTVk?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMteWRUcTVk?revision=60","title":"image.png","associationType":"BODY","width":3410,"height":2222,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtQzBuYmhU?revision=60\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtQzBuYmhU?revision=60","title":"image.png","associationType":"BODY","width":858,"height":650,"altText":""},"TkbTopicMessage:message:338353":{"__typename":"TkbTopicMessage","subject":"Securing Model Serving in Red Hat OpenShift AI (on ROSA) with F5 Distributed Cloud API Security","conversation":{"__ref":"Conversation:conversation:338353"},"id":"message:338353","revisionNum":60,"uid":338353,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:326558"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":" Learn how Red Hat OpenShift AI on ROSA and F5 Distributed Cloud API Security work together to protect generative AI model inference endpoints. This integration ensures robust API discovery, schema enforcement, LLM-aware threat detection, bot mitigation, sensitive data redaction, and continuous observability—enabling secure, compliant, and high-performance AI-driven experiences at scale. ","introduction":"","metrics":{"__typename":"MessageMetrics","views":777},"postTime":"2024-12-18T12:01:43.571-08:00","lastPublishTime":"2024-12-18T12:01:43.571-08:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" As enterprises embrace Generative AI—particularly deploying large language models (LLMs) and other foundational AI models—production environments become increasingly complex. Organizations need an end-to-end MLOps platform that streamlines the entire lifecycle: developing, training, fine-tuning, and especially serving models at scale. \n Red Hat OpenShift AI (OpenShift AI) meets this need by providing a comprehensive, hybrid MLOps environment. When deployed on Red Hat OpenShift Service on AWS (ROSA), OpenShift AI takes advantage of AWS’s managed infrastructure services and cloud-native elasticity, enabling organizations to scale AI/ML workflows efficiently and cost-effectively. \n Yet, as models are served to end-users or integrated into downstream applications, security considerations become paramount. Inference endpoints may be targeted for unauthorized access, data exfiltration, or prompt manipulation. F5 Distributed Cloud addresses these challenges by offering robust capabilities—API discovery, schema enforcement, LLM-aware threat detection, bot mitigation, sensitive data redaction, and continuous observability—to ensure that inference endpoints remain secure, compliant, and high-performing. \n In this post, we will: \n \n Introduce OpenShift AI and its capabilities for AI/ML workloads with an emphasis on model serving. \n Discuss how running OpenShift AI on ROSA leverages AWS services for scalable, cost-effective AI/ML operations. \n Show how F5 Distributed Cloud API Security enhances the security posture of generative AI model inference endpoints—covering automated API discovery, schema enforcement, threat detection, rate limiting, and compliance measures. \n Demonstrate how to integrate these capabilities end-to-end, using Ollama Mistral-7B as an example model. \n \n What is Red Hat OpenShift AI for Generative AI Applications? \n Red Hat OpenShift AI is a hybrid MLOps platform designed to simplify the entire AI lifecycle. It empowers teams to train, fine-tune, deploy, serve, monitor, and continuously improve generative AI models. By merging IT operations, data science workflows, and application development practices into one platform, OpenShift AI accelerates innovation, fosters governance, and encourages collaboration—essential ingredients for delivering enterprise-grade AI solutions. \n Key Features: \n \n Hybrid MLOps Platform: Unified environment for data scientists, developers, and operators, whether you run on-premises, in the cloud, or in a hybrid setup. \n Distributed Workloads & Fine-Tuning: Scale model training and fine-tuning across distributed compute frameworks, adapting large language models to your domain’s requirements. \n Model Serving & Monitoring: Deploy and serve models at scale using technologies like KServe, ModelMesh, and a variety of specialized runtimes (OpenVINO™ Model Server, Caikit/TGIS, NVIDIA Triton Inference Server, etc.). Continuously monitor model performance, detect drift, and ensure ongoing model quality. \n Lifecycle Management & DevOps Integration: Seamlessly integrate data science pipelines with CI/CD workflows. Automate model deployment, rollout new versions safely, and achieve consistent delivery of AI-driven features. \n Enhanced Collaboration: Enable data scientists, developers, and IT Ops to work together using notebooks (JupyterLab), popular frameworks (TensorFlow, PyTorch), and unified governance, speeding up innovation and time-to-value. \n \n High-Level Architecture: OpenShift AI integrates workbenches, distributed workloads, data science pipelines, serving engines, and monitoring tools atop Kubernetes and OpenShift operators, leveraging GitOps, pipelines, service mesh, and serverless technologies. \n \n Figure 1: OpenShift AI High-Level Architecture \n Running OpenShift AI on ROSA for Scalable AI/ML Solutions \n Red Hat OpenShift Service on AWS (ROSA) brings a fully managed OpenShift environment to AWS. This allows teams to focus on building and serving AI models rather than managing infrastructure. \n Key advantages: \n \n Scalability: Seamlessly scale GPU-accelerated compute, storage, and networking resources as model serving workloads grow. \n Cost Efficiency & On-Demand Resources: Leverage Amazon EC2 instances, Amazon S3, and other AWS services only as needed, paying as you go. \n Unified Management: Offload cluster operations and lifecycle management to ROSA, ensuring reliable operations and freeing your team to concentrate on AI innovation. \n \n Security Challenges in Model Serving for Generative AI \n While OpenShift AI and ROSA simplify operations, serving AI models still raises critical security concerns: \n \n Unauthorized Access & Data Leakage: External requests to inference endpoints may attempt to extract proprietary knowledge or sensitive data from the model. \n Prompt Injection & Malicious Content: LLMs can be tricked into producing harmful or confidential outputs if the prompts are manipulated. \n Bot Attacks & Performance Risks: Automated scripts can overwhelm inference endpoints, degrade performance, or escalate costs. \n Compliance & Sensitive Data Handling: AI outputs can contain PII or regulated data, necessitating encryption, redaction, and audit trails to meet compliance demands. \n Evolving Threat Landscape: The complexity and dynamism of AI models and APIs call for continuous posture management and adaptive threat detection. \n \n Enhancing Security with F5 Distributed Cloud API Security \n F5 Distributed Cloud provides a comprehensive set of capabilities tailored for securing modern AI inference endpoints. By integrating these capabilities with OpenShift AI deployments on ROSA, organizations gain: \n \n Automated API Discovery & Posture Management:\n \n Identify all inference endpoints automatically, eliminating hidden APIs. \n Enforce schemas based on observed traffic to ensure requests and responses follow expected patterns. \n Integrate “shift-left” security checks into CI/CD pipelines, catching misconfigurations before production. \n \n \n LLM-Aware Threat Detection & Request Validation:\n \n Detect attempts to manipulate prompts or break compliance rules, ensuring suspicious requests are blocked before reaching the model. \n \n \n Bot Mitigation & Adaptive Rate Controls:\n \n Differentiate between legitimate users and bots or scrapers, blocking automated attacks. \n Dynamically adjust rate limits and policies based on usage history and real-time conditions, maintaining performance and reliability. \n \n \n Sensitive Data Redaction & Compliance:\n \n Identify and mask PII or sensitive tokens in requests and responses. \n Adhere to data protection regulations and maintain detailed logs for auditing, monitoring, and compliance reporting. \n \n \n Seamless Integration & Observability:\n \n Deploy F5 Distributed Cloud seamlessly alongside OpenShift AI on ROSA without reshuffling existing architecture. \n Use centralized dashboards and analytics to monitor real-time metrics—latency, error rates, compliance indicators—to continuously refine policies and adapt to emerging threats. \n \n \n \n Example: Working with Multiple OLLAMA Models and Programmatic Inference \n In this scenario, multiple OLLAMA models have been deployed on the OpenShift cluster. For instance: \n sh-5.1$ ollama ls \nNAME ID SIZE MODIFIED \nllama2:7b 78e26419b446 3.8 GB 3 days ago \nllama3.2:1b baf6a787fdff 1.3 GB 2 weeks ago \nmario:latest 7434c42677ab 3.8 GB 12 seconds ago \nmistral:latest f974a74358d6 4.1 GB 11 days ago \norca-mini:latest 2dbd9f439647 2.0 GB About a minute ago\nphi3:latest 4f2222927938 2.2 GB 2 weeks ago \nphi3:mini 4f2222927938 2.2 GB 2 weeks ago \ntinyllama:latest 2644915ede35 637 MB 2 weeks ago \n \n We have a variety of models—ranging from Mistral-7B to tinyllama—that can be served simultaneously. While the environment is currently using CPUs for hosting these models, you could leverage GPUs for better performance in a production scenario. \n Unlike the model training and fine-tuning phase—where we worked directly within OpenShift AI Workbenches (to be covered in future blogs)— in this scenario we’re querying the LLM endpoint from a local application, where Python libraries and LangChain are installed. Instead of hitting the cluster directly, we route traffic through an F5 Distributed Cloud-managed URL (e.g., http://llm01.volt.thebizdevops.net). In a real-world deployment, the frontend could also be hosted on OpenShift or served through F5 Distributed Cloud Regional Edges (RE), providing flexible options for scaling and delivering the application. \n This ensures that all requests pass through F5 Distributed Cloud’s security layers, applying policies, detecting threats, and protecting sensitive data before they reach the LLM endpoint hosted in OpenShift AI on ROSA. \n \n Figure 2: Integrated Architecture with F5 Distributed Cloud and OpenShift AI on AWS \n F5 Distributed Cloud Capabilities in Action \n To illustrate the key F5 Distributed Cloud features, we’ve divided them into distinct capabilities and included screen captures in the F5 Distributed Cloud console. These captures provide a visual reference to understand how each capability enhances the security and compliance posture of your LLM inference endpoints. \n 1. API Discovery & Schema Enforcement \n What It Does: F5 Distributed Cloud automatically identifies all exposed inference endpoints for your AI/LLM models. It then derives schemas from real traffic, enforcing expected request and response formats. By blocking malformed inputs early, your model stays protected, ensuring consistent, reliable, and trustworthy inferences. \n \n Figure 3: API Discovery & Schema Enforcement (Refer to the annotated image above showing multiple discovered endpoints, shadow APIs, and downloadable OpenAPI specifications derived from actual traffic patterns.) \n 2. LLM-Aware Threat Detection & Request Validation \n What It Does: This feature identifies potential threats to your LLM endpoints by enforcing strict OpenAPI-based validation on incoming requests. By catching invalid or suspicious inputs early, you can adjust policies to block them in the future, ensuring that malicious attempts—whether aiming to exploit the LLM’s behavior or break compliance rules—never reach the inference logic. \n \n Figure 4: Security Analysis for a Non-Compliant Request (Here, the request triggered an OpenAPI validation failure. Although currently “allowed,” policies can be easily configured to “block” these events going forward, preventing non-compliant or potentially harmful requests from impacting your LLM models.) \n   \n \n Figure 5: API Inventory Validation Configuration (This image illustrates the corresponding configuration settings for OpenAPI Validation. By validating both requests and responses at multiple layers—headers, body, and content-type—F5 Distributed Cloud ensures that LLM prompts remain safe, compliant, and free from injection attacks.) \n 3. Bot Mitigation & Rate Limiting \n What It Does: Differentiates legitimate user traffic from automated bots or scrapers bot mitigation, and ensures fair usage of resources (rate limiting). By differentiating legitimate requests from bot-driven abuse and enforcing request thresholds, F5 Distributed Cloud protects inference endpoints from performance degradation while maintaining a positive user experience. \n \n Figure 6: Bot Defense in Action (This image shows how F5 Distributed Cloud identifies and classifies automated traffic as “Bad Bots,” blocking them to preserve endpoint availability and prevent resource exhaustion.) \n   \n \n Figure 7: Configuring Rate Limits (This image illustrates the setup of request thresholds for a specific endpoint, ensuring no single client overwhelms the inference service.) \n   \n \n Figure 8: Enforced Rate Limit (429 Too Many Requests) (This image demonstrates a client exceeding the above-configured request limit and receiving a 429 response, confirming that F5 Distributed Cloud’s rate limiting is actively maintaining fair resource allocation.) \n 4. Sensitive Data Redaction & Compliance Logging \n What It Does: Identifies and masks personally identifiable information (PII) or other sensitive data—such as credit card numbers, emails, and phone numbers—within model responses. New Sensitive Data Exposure Rules allow you to customize and enforce policies to block or redact sensitive fields dynamically. This ensures compliance with frameworks like HIPAA, GDPR., and other regulatory mandates while capturing detailed logs for auditing. \n \n Figure 9: Adding Sensitive Data Exposure Rules (This image shows how you can add custom rules to detect and control exposure of sensitive fields, such as card-expiration dates, phone numbers, and credit-card details, ensuring model responses comply with organizational security policies.) \n   \n \n Figure 10: Sensitive Data Detection Across APIs (Here, sensitive data types—like social-security numbers and phone numbers—are automatically detected across API responses. Built-in and custom rules flag potential exposures, empowering teams to enforce redaction and maintain compliance.) \n   \n \n Figure 11: Service Policy for Model Validation (This image shows a service policy in action, blocking an inference request that doesn’t meet defined model validation criteria. Such policies can also be tied to compliance mandates, ensuring non-compliant responses are never returned to clients.) \n   \n \n Figure 12: API Compliance & Sensitive Data Detection (Here, sensitive fields like credit-card or phone-number are automatically identified, and associated compliance frameworks (HIPAA, GDPR) are recognized. This empowers you to enforce data redaction, maintain regulatory compliance, and produce audit-ready logs without revealing sensitive details.) \n 5. Centralized Observability & Continuous Policy Updates \n What It Does: Offers dashboards and analytics tools to monitor request volumes, latency, errors, and compliance metrics across your AI inference endpoints. Security teams can leverage these observations to continuously refine their policies, enhance schema validations, and recalibrate rate limits as threats evolve or model usage grows. \n \n Figure 13: Endpoint-Level Metrics Dashboard (This example shows an LLM endpoint /api/generate with available metrics including error rate, latency, request rate, request size, response size, and throughput. By monitoring these trends, teams can quickly identify performance bottlenecks, detect anomalies, and apply targeted policy changes to maintain optimal efficiency and security.) \n The Outcome: Secure, Compliant, and Performant LLM Serving \n By combining Red Hat OpenShift AI on ROSA with F5 Distributed Cloud, organizations can: \n \n Confidently serve multiple LLMs at scale, handling diverse use cases and workloads. \n Securely expose inference endpoints, ensuring that requests from external clients are validated, sanitized, and protected against prompt injection, unauthorized access, or excessive traffic. \n Maintain compliance and privacy, redacting sensitive data and logging requests for auditing and reporting purposes. \n Continuously adapt to evolving threats, leveraging real-time observability and agile policy management for persistent security improvements. \n \n This powerful combination enables generative AI models to be woven into complex enterprise workflows—such as insurance claims processing—without sacrificing trust, governance, or user satisfaction. \n Conclusion \n Red Hat OpenShift AI on ROSA, bolstered by F5 Distributed Cloud API Security, provides a robust, scalable, and secure foundation for running generative AI workloads in production. Together, they address the nuanced security challenges of exposing LLM inference endpoints to external clients. \n Whether you are working with a single LLM or managing a portfolio of models like OLLAMA’s Mistral, Phi3, and TinyLlama, this combination ensures that your users—connecting from anywhere—can trust the quality, security, and compliance of the AI services they rely on. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"16330","kudosSumWeight":5,"repliesCount":4,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtTXJvZ2o4?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtOE9BVFdX?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDM","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtWDZYT0JG?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDQ","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtZGV6OEJ4?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDU","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtd0t4N0FC?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDY","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtdGZMc1lQ?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDc","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtb3lGSkxH?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDg","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMteHpyb0kx?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDk","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtYUVPSVJS?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEw","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtY1NqeDJj?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEx","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtTjAxZG1E?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEy","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMteWRUcTVk?revision=60\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDEz","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzgzNTMtQzBuYmhU?revision=60\"}"}}],"totalCount":13,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:338825":{"__typename":"Conversation","id":"conversation:338825","topic":{"__typename":"TkbTopicMessage","uid":338825},"lastPostingActivityTime":"2025-01-02T05:30:00.034-08:00","solved":false},"User:user:419633":{"__typename":"User","uid":419633,"login":"Koichi","registrationData":{"__typename":"RegistrationData","status":null},"deleted":false,"avatar":{"__typename":"UserAvatar","url":"https://community.f5.com/t5/s/zihoc95639/images/dS00MTk2MzMtMjUxMTJpODRENkE1RkUxRjBDNkI2QQ"},"id":"user:419633"},"TkbTopicMessage:message:338825":{"__typename":"TkbTopicMessage","subject":"AI Security - LLM-DOS, and predictions of 2025 and beyond","conversation":{"__ref":"Conversation:conversation:338825"},"id":"message:338825","revisionNum":4,"uid":338825,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:419633"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":"","introduction":"","metrics":{"__typename":"MessageMetrics","views":297},"postTime":"2025-01-02T05:30:00.034-08:00","lastPublishTime":"2025-01-02T05:30:00.034-08:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" Introduction \n Hello again, this article is part of AI security series. I have been discussing AI security along with the OWASP LLM top10. \n LLM01 and LLM02 were discussed in the \"AI Security : Prompt Injection and Insecure Output Handling\", and LLM03 and its basic concepts were discussed in the \"Using ChatGPT for security and introduction of AI security\". In this article, I am going to discuss LLM04. And, since we are almost at the end of the year 2024, I would like to present some discussions and predictions for AI security in 2025 and beyond. \n LLM04: Model Denial of Service \n LLM04 is relatively easy to understand for security engineers who is familiar with conventional cyber attack methods. Denial of Service (DoS) is a common method of cyber attack, in which a large amount of data is given to the server to make it unable to provide services and/or crash. DoS attacks usually aim to exhaust computational resources and block services rather than stealing data, but the disruption they cause can be used as a smokescreen for more malicious activities, such as data breaches or malware installation. \n DOS attack against LLM (LLM-DOS) is same. It aims to exhaust computational resources of DOS (like CPU/GPU usage) and block services (like responding to chat).  LLM-DOS can be done in two ways. One is a simple LLM-DOS attack which is to mass input against the LLM's input, similar to a DOS attack against a server. This method, as described in this article, can deplete the LLM's resources, like CPU/GPU usages. If you call this as a simple DoS attack, in such a scenario would be to instruct the model to keep repeating Hello, but we see that relying only on natural instructions limits the output length, which is limited by the maximum length of the LLM's Supervised Fine-Tuning (SFT) data \n The another method of LLM-DOS is to include code in the input that over-consumes resources. Denial-of-Service Poisoning Attacks on Large Language Models is discussing this. In the paper, this is called as a poisoning-based DoS (P-DoS) attack and it demonstrates that the output length limit can be broken by injecting a single poisoning sample designed for DoS purposes. Experiments reveal that an attacker can easily compromise models such as GPT-4o and GPT-4o mini by injecting a single poisoning sample through the OpenAI API at a minimal cost of less than $1. \n To understand this, it is easier to think about simple programming - for example, if you put an inescapable loop statement in your code, it can hang the computer (in fact, the IDE will warn you before it compiles). And if the network does not have a Spanning Tree Protocol, it will loop and hang the router. So same things happens on prompt injection.  \n When using this idea LLLM-DOS, we must consider that such input should be blacklisted, so the simple way of using inescapable loop is impossible. Also, even if it is possible against WhiteBox, but we do not know what kind of attack is possible in BlackBox. However, according to \"Crabs: Consuming Resource via Auto-generation for LLM-DoS Attack under Black-box Settings\", a Prompt input to the BlackBox can generate multiple sub-prompts (e.g., 25 sub-prompts). Its experiments show that the delay could be increased by a factor of 250. \n Given these serious safety concerns, researchers advocate further research aimed at defending against LLM-DoS threats in custom fine tuning of aligned LLMs. \n What will happen in 2025 and beyond? \n Some news site predicts an intensifying AI arms race in coming year. I would like to share an article on AI security predictions for the coming year and beyond. According to an article by EG Secure solutions, the generative AI makes it possible to create a malware without specialized skills, that makes easier to do cyber attacks. Thus, the article predicted that cyber attacks by malware created by generative AI would increase. The article also points out that LLM-generated applications such as RAGs are being used, but their code may contain vulnerabilities, and that will be another threat in 2025 and beyond. \n McAfee has released \"McAfee Unveils 2025 Cybersecurity Predictions: AI-Powered Scams and Emerging Digital Threats Take Center Stage\". According to the article, cyber attacks by malicious attackers will be highly optimized by generative AI, and the quality of DeepFake and AI-generated images/videos will increase, making it difficult to determine whether they are created by humans or generative AI. Thus it is expected that fake emails generated by generative AI, such as phishing emails, will also become harder to distinguish from real emails. Furthermore, the article points out that  malware which is using (maybe created by) generative AI will become more sophisticated, thereby breaking through conventional security defense systems and may succeed to extracting personal information and sensitive data. \n Finally, the \"Infosec experts divided on AI's potential to assist red teams\" discusses the pros and cons of using generative AI for red teaming, one type of security audit. According to the article, the benefit of using generative AI is that it accelerates threat detection by allowing AI to scour multiple data feeds, applications, and other sources of performance data and run them as part of a larger automated workflow. On the other hand, the article also argues that using generative AI for red teaming is still limited, because the vulnerability discovery process by AI is a black box so the pen-tester cannot explain how they discovered to their clients. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"5587","kudosSumWeight":1,"repliesCount":0,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:334519":{"__typename":"Conversation","id":"conversation:334519","topic":{"__typename":"TkbTopicMessage","uid":334519},"lastPostingActivityTime":"2024-10-09T10:28:39.575-07:00","solved":false},"User:user:194786":{"__typename":"User","uid":194786,"login":"Janibasha","registrationData":{"__typename":"RegistrationData","status":null},"deleted":false,"avatar":{"__typename":"UserAvatar","url":"https://community.f5.com/t5/s/zihoc95639/images/dS0xOTQ3ODYtMjA5NDJpMEI1Q0JDRDNGRkQ2MUM0Mw"},"id":"user:194786"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktOUVjdU9S?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktOUVjdU9S?revision=7","title":"no-dataguard-result.png","associationType":"BODY","width":3122,"height":1054,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktZ0hwcUJs?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktZ0hwcUJs?revision=7","title":"image.png","associationType":"BODY","width":1795,"height":1537,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktbU9SbVI1?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktbU9SbVI1?revision=7","title":"owasp-llm-nap.jpeg","associationType":"BODY","width":1670,"height":660,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktRXcwUTBy?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktRXcwUTBy?revision=7","title":"svc.JPG","associationType":"BODY","width":3213,"height":614,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktRXRHaUxX?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktRXRHaUxX?revision=7","title":"llm-ssn-hidden.jpg","associationType":"BODY","width":3663,"height":1709,"altText":""},"TkbTopicMessage:message:334519":{"__typename":"TkbTopicMessage","subject":"Mitigate OWASP LLM Security Risk: Sensitive Information Disclosure Using F5 NGINX App Protect","conversation":{"__ref":"Conversation:conversation:334519"},"id":"message:334519","revisionNum":7,"uid":334519,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:194786"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":" This short WAF security article covered the critical security gaps present in current generative AI applications, emphasizing the urgent need for robust protection measures in LLM design deployments. Finally we also demonstrated how F5 Nginx App Protect v5 offers an effective solution to mitigate the OWASP LLM Top 10 risks. ","introduction":"","metrics":{"__typename":"MessageMetrics","views":330},"postTime":"2024-10-07T05:00:00.027-07:00","lastPublishTime":"2024-10-09T10:28:39.575-07:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" Introduction: \n This article covers the basics of the rise of AI (Gen AI) and how F5 products can protect these AI backends. The rise of Large Language Models (LLMs) has marked a transformative era in AI, enabling machines to produce and comprehend text with human-like proficiency. These sophisticated models are now integral to applications in customer support, content creation, and even scientific research. However, their advanced capabilities also cause big security worries, especially around accidentally sharing sensitive information. These models can sometimes share private data from their training data. This means we need strong protection systems to reduce these risks. Addressing these challenges, the OWASP LLM Top 10 project has been created to identify and prioritize the most critical security threats associated with LLMs. Out of this top 10, LLM-06 risk specifically focuses on sensitive information disclosure, emphasizing the importance of stringent data handling protocols and privacy safeguards to prevent unintended data leaks to ensure the secure and ethical use of LLM technology. In this article, we are going to see how F5 Nginx App Protect v5 can protect the LLM backends from LLM06: Sensitive Information Disclosure risk. \n   \n Use case: \n We are going to deploy a Gen AI application which takes URL hosting data and is passed on to a backend LLM application. Once data is analyzed by LLM, users can ask questions about this data and LLM will come back with the right answers. We have deployed this application inside AWS EKS cluster and 2 application services are running inside this cluster. Front-end services serve the UI, and the backend hosts the LLM model. How and what of this application with its internal tools and LLM model is not of importance and can find many free tools online. Since this article focusses on LLM06: Sensitive Information Disclosure risk, we will pass a website URL containing some dummy SSN of random users. Once this website data is loaded to LLM, we can ask for the SSN of a user and LLM will return the SSN from this data. SSNs are sensitive information and should always be protected as it will lead to personal data exploitation. In this case, LLM model does not have security rules to find and protect this data. So, it will be directly shown in the response as shown below. \n \n To protect this LLM backend service, we are going to deploy and configure NGINX App Protect Version 5 as a k8s workload in the data path. The latest release of NGINX App Protect v5 has made the WAF process more ridiculously easy, thereby making it more efficient and optimized. All data traffic will be validated by the NGINX App Protect before being exposed in the response. In this use case, since we want to mask the SSN, we are going to configure the data-guard feature with its appropriate configuration files onboarded to this container.  \n \n Above configuration file can be downloaded from NGINX App Protect WAF configuration guide. \n Data guard is a WAF feature which detects and masks Credit Card Number (CCN) and/or U.S. Social Security Number (SSN) and/or custom patterns in HTTP responses. Since data-guard feature is enabled, SSNs of users in the LLM backend response are detected and masked by the NGINX App Protect thereby protecting the personal data. For more info on NGINX App Protect data guard feature, check this link.   NOTE: Since this is just for demo and focuses only on LLM workload protection, we are using NGINX App Protect v5. But as per customer practices, users can configure NGINX Ingress Controller, Secure Mesh, etc. \n \n   \n Deployment Steps:  \n \n Check the service cluster IP of backend LLM service and update it in below yaml file upstream server to create nginx config file configmap apiVersion: v1\nkind: ConfigMap\nmetadata:\n name: nginx-conf-map-api\n namespace: default\ndata:\n nginx.conf: |\n user nginx;\n\n worker_processes auto;\n load_module modules/ngx_http_app_protect_module.so;\n\n error_log /var/log/nginx/error.log debug;\n\n events {\n worker_connections 10240;\n }\n\n http {\n include /etc/nginx/mime.types;\n default_type application/octet-stream;\n sendfile on;\n keepalive_timeout 65;\n \n # NGINX App Protect WAF\n app_protect_enforcer_address 127.0.0.1:50000;\n\n upstream main_DNS_name {\n server 172.20.41.242:8000;\n }\n server {\n listen 80;\n proxy_http_version 1.1;\n proxy_read_timeout 600;\n proxy_connect_timeout 600;\n proxy_send_timeout 600;\n\n app_protect_enable on;\n app_protect_policy_file \"/etc/app_protect/bundles/NAP_API_Policy.tgz\";\n app_protect_security_log_enable on;\n app_protect_security_log log_all /etc/app_protect/bundles/security.log;\n\n location / {\n client_max_body_size 0;\n default_type text/html;\n # set your backend here\n proxy_pass http://main_DNS_name;\n proxy_set_header Host $host;\n }\n }\n }\n   \n Build and push a NGINX Plus docker image to your private registry by following this link \n Copy your JWT token and run below command to create a k8s secret # kubectl create secret docker-registry regcred --docker-server=private-registry.nginx.com --docker-username=<JWT Token> --docker-password=none \n Check the below file, update API policy bundle URL in init container and docker image info in nginx container. Apply this file to install nginx deployment and pods. apiVersion: apps/v1\nkind: Deployment\nmetadata:\n name: nap5-deployment\nspec:\n selector:\n matchLabels:\n app: nap5\n replicas: 1\n template:\n metadata:\n labels:\n app: nap5\n spec:\n imagePullSecrets:\n - name: regcred\n initContainers:\n - name: init-fetchbundle\n image: curlimages/curl:8.9.1\n command: \n - sh\n - -c\n - |\n echo \"Downloading file...\"\n curl -vvv -L https://github.com/f5devcentral/f5-xc-terraform-examples/raw/main/workflow-guides/NAP_API_Policy.tgz -o /etc/app_protect/bundles/NAP_API_Policy.tgz \n volumeMounts:\n - name: app-protect-bundles\n mountPath: /etc/app_protect/bundles \n containers:\n - name: nginx\n image: <registry-url>:tag-name\n imagePullPolicy: IfNotPresent\n volumeMounts:\n - name: app-protect-bd-config\n mountPath: /opt/app_protect/bd_config\n - name: app-protect-config\n mountPath: /opt/app_protect/config\n - name: nginx-conf-map-api-volume\n mountPath: /etc/nginx/nginx.conf\n subPath: nginx.conf\n - name: nap-api-policy-volume\n mountPath: /etc/nginx/NAP_API_Policy.json\n subPath: NAP_API_Policy.json \n \n - name: waf-enforcer\n image: private-registry.nginx.com/nap/waf-enforcer:5.2.0\n imagePullPolicy: IfNotPresent\n env:\n - name: ENFORCER_PORT\n value: \"50000\"\n volumeMounts:\n - name: app-protect-bd-config\n mountPath: /opt/app_protect/bd_config \n \n - name: waf-config-mgr\n image: private-registry.nginx.com/nap/waf-config-mgr:5.2.0\n imagePullPolicy: IfNotPresent\n securityContext:\n allowPrivilegeEscalation: false\n capabilities:\n drop:\n - all\n volumeMounts:\n - name: app-protect-bd-config\n mountPath: /opt/app_protect/bd_config\n - name: app-protect-config\n mountPath: /opt/app_protect/config\n - name: app-protect-bundles\n mountPath: /etc/app_protect/bundles\n \n volumes:\n - name: app-protect-bd-config\n emptyDir: {}\n - name: app-protect-config\n emptyDir: {}\n - name: app-protect-bundles\n emptyDir: {}\n - name: nginx-conf-map-api-volume\n configMap:\n name: nginx-conf-map-api\n - name: nap-api-policy-volume\n configMap:\n name: nap-api-policy \n Next deploy NGINX App Protect service using below file apiVersion: v1\nkind: Service\nmetadata:\n name: nap5\n labels:\n app: nap5\n service: nap5\nspec:\n ports:\n - protocol: TCP\n port: 80\n targetPort: 80\n selector:\n app: nap5\n type: ClusterIP \n   \n \n Check the cluster services and copy the NGINX App Protect service cluster ip \n Update App Protect cluster ip address in your Gen AI application frontend yaml file openAI address to create deployment and load balancer service \n \n Testing: \n \n Once setup is complete, check the cluster services command and open the Gen AI front end load balancer service URL in a browser \n Enter the web page input as https://dlptest.com/sample-data/namessndob/ and in query provide \"What is Robert Aragon's SSN?\" \n After some time, validate that SSN number is masked in the response   \n \n Conclusion: \n This article highlights the critical security gaps present in current Gen AI applications, emphasizing the urgent need for robust protection measures in LLM design deployments. In the latter half, we demonstrated how F5 NGINX App Protect v5, with its advanced security features, offers an effective solution to mitigate the OWASP LLM Top 10 risks. By leveraging these capabilities, organizations can significantly enhance the security and resilience of their AI applications. \n   \n References: \n \n https://genai.owasp.org/llm-top-10/  \n https://genai.owasp.org/llmrisk/llm06-sensitive-information-disclosure/ \n https://docs.nginx.com/nginx-app-protect-waf/v5/admin-guide/deploy-on-kubernetes/ \n https://docs.nginx.com/nginx-app-protect-waf/v5/admin-guide/compiler/ \n \n   \n NOTE: This article covered only one risk and stay tuned for more exciting articles on remaining OWASP LLM Top 10 risks prevention using F5 products. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"10168","kudosSumWeight":2,"repliesCount":0,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktOUVjdU9S?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktZ0hwcUJs?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDM","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktbU9SbVI1?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDQ","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktRXcwUTBy?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDU","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzQ1MTktRXRHaUxX?revision=7\"}"}}],"totalCount":5,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:330738":{"__typename":"Conversation","id":"conversation:330738","topic":{"__typename":"TkbTopicMessage","uid":330738},"lastPostingActivityTime":"2024-06-24T15:55:49.472-07:00","solved":false},"User:user:405306":{"__typename":"User","uid":405306,"login":"Steve_Gorman","registrationData":{"__typename":"RegistrationData","status":null},"deleted":false,"avatar":{"__typename":"UserAvatar","url":"https://community.f5.com/t5/s/zihoc95639/images/dS00MDUzMDYtem9ROGRa?image-coordinates=0%2C0%2C500%2C500"},"id":"user:405306"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtZ0puYUhr?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtZ0puYUhr?revision=6","title":"001_Flow_of_AI_Prompts.png","associationType":"BODY","width":2981,"height":1565,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtQldZYlhF?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtQldZYlhF?revision=6","title":"002_Curl_Sensitive_Data_Request.png","associationType":"BODY","width":2477,"height":415,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtancxZEtv?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtancxZEtv?revision=6","title":"003_Distributed_Cloud_Processes_Prompt.png","associationType":"BODY","width":1602,"height":916,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtTUZsY3hJ?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtTUZsY3hJ?revision=6","title":"004_Prompt_Security_Processes_Prompt.png","associationType":"BODY","width":3757,"height":1738,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtY1BiVjVT?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtY1BiVjVT?revision=6","title":"006_security_frame_works.png","associationType":"BODY","width":3002,"height":940,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtRktQNWpZ?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtRktQNWpZ?revision=6","title":"005_code_detection.png","associationType":"BODY","width":580,"height":764,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtRm1ERGdG?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtRm1ERGdG?revision=6","title":"008_topic_detection.png","associationType":"BODY","width":2683,"height":1376,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtOXBOcEh5?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtOXBOcEh5?revision=6","title":"009_rest_api_response.png","associationType":"BODY","width":1089,"height":949,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtMUJkRktr?revision=6\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtMUJkRktr?revision=6","title":"007.png","associationType":"BODY","width":1920,"height":1737,"altText":""},"TkbTopicMessage:message:330738":{"__typename":"TkbTopicMessage","subject":"Securing the LLM User Experience with an AI Firewall","conversation":{"__ref":"Conversation:conversation:330738"},"id":"message:330738","revisionNum":6,"uid":330738,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:405306"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":"","introduction":"","metrics":{"__typename":"MessageMetrics","views":1297},"postTime":"2024-06-20T21:34:58.192-07:00","lastPublishTime":"2024-06-24T15:55:49.472-07:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" As artificial intelligence (AI) seeps into the core day-to-day operations of enterprises, a need exists to exert control over the intersection point of AI-infused applications and the actual large language models (LLMs) that answer the generated prompts.   This control point should serve to impose security rules to automatically prevent issues such as personally identifiable information (PII) inadvertently exposed to LLMs.   The solution must also counteract motivated, intentional misuse such as jailbreak attempts, where the LLM can be manipulated to provide often ridiculous answers with the ensuing screenshotting attempting to discredit the service. \n Beyond the security aspect and the overwhelming concern of regulated industries, other drivers include basic fiscal prudence 101, ensuring the token consumption of each offered LLM model is not out of hand.   This entire discussion around observability and policy enforcement for LLM consumption has given rise to a class of solutions most frequently referred to as AI Firewalls or AI Gateways (AI GW). \n An AI FW might be leveraged by a browser plugin, or perhaps applying a software development kit (SDK) during the coding process for AI applications.   Arguably, the most scalable and most easily deployed approach to inserting AI FW functionality into live traffic to LLMs is to use a reverse proxy.   A modern approach includes the F5 Distributed Cloud service, coupled with an AI FW/GW service, cloud-based or self-hosted, that can inspect traffic intended for LLMs like those of OpenAI, Azure OpenAI, or privately operated LLMs like those downloaded from Hugging Face. \n A key value offered by this topology, a reverse proxy handing off LLM traffic to an AI FW, which in turn can allow traffic to reach target LLMs, stems from the fact that traffic is seen, and thus controllable, in both directions.   Should an issue be present in a user’s submitted prompt, also known as an “inference”, it can be flagged: PII (Personally Identifiable Information) leakage is a frequent concern at this point.   In addition, any LLM responses to prompts are also seen in the reverse path: consider a corrupted LLM providing toxicity in its generated replies.  Not good. \n To achieve a highly performant reverse proxy approach to secured LLM access, a solution that can span a global set of users, F5 worked with Prompt Security to deploy an end-to-end AI security layer.  This article will explore the efficacy and performance of the live solution. \n   \n Impose LLM Guardrails with the AI Firewall and Distributed Cloud \n   \n An AI firewall such as the Prompt Security offering can get in-line with AI LLM flows through multiple means.  API calls from Curl or Postman can be modified to transmit to Prompt Security when trying to reach targets such as OpenAI or Azure OpenAI Service.   Simple firewall rules can prevent employee direct access to these well-known API endpoints, thus making the Prompt Security route the sanctioned method of engaging with LLMs. \n A number of other methods could be considered but have concerns.   Browser plug-ins have the advantage of working outside the encryption of the TLS layer, in a manner similar to how users can use a browser’s developer tools to clearly see targets and HTTP headers of HTTPS transactions encrypted on the wire.   Prompt Security supports plugins.  A downside, however, of browser plug-ins is the manageability issue, how to enforce and maintain across-the-board usage, simply consider the headache non-corporate assets used in the work environment.  \n Another approach, interesting for non-browser, thick applications on desktops, think of an IDE like VSCode, might be an agent approach, whereby outbound traffic is handled by an on-board local proxy. Again, Prompt can fit in this model however the complexity of enforcement of the agent, like the browser approach, may not always be easy and aligned with complete A-to-Z security of all endpoints. \n One of the simplest approaches is to ingest LLM traffic through a network-centric approach.  An F5 Distributed Cloud HTTPS load balancer, for instance, can ingest LLM-bound traffic, and thoroughly secure the traffic at the API layer, things like WAF policy and DDoS mitigations, as examples.   HTTP-based control plane security is the focus here, as opposed to the encapsulated requests a user is sending to an LLM.  The HTTPS load balancer can in turn hand off traffic intended for the likes of OpenAI to the AI gateway for prompt-aware inspections. \n F5 Distributed Cloud (XC) is a good architectural fit for inserting a third-party AI firewall service in-line with an organization’s inferencing requests.   Simply project a FQDN for the consumption of AI services; in this article we used the domain name “llmsec.busdevF5.net” into the global DNS, advertising one single IP address mapping to the name.   This DNS advertisement can be done with XC.  The IP address, through BGP-4 support for anycast, will direct any traffic to this address to the closest of 27 international points of presence of the XC global fabric.   Traffic from a user in Asia may be attracted to Singapore or Mumbai F5 sites, whereas a user in Western Europe might enter the F5 network in Paris or Frankfurt. \n \n As depicted, a distributed HTTPS load balancer can be configured – “distributed” reflects the fact traffic ingressing in any of the global sites can be intercepted by the load balancer. Normally, the server name indicator (SNI) value in the TLS Client Hello can be easily used to pick the correct load balancer to process this traffic. \n The first step in AI security is traditional reverse proxy core security features, all imposed by the XC load balancer.   These features, to name just a few, might include geo-IP service policies to preclude traffic from regions, automatic malicious user detection, and API rate limiting; there are many capabilities bundled together.   Clean traffic can then be selected for forwarding to an origin pool member, which is the standard operation of any load balancer.   In this case, the Prompt Security service is the exclusive member of our origin pool. For this article, it is a cloud instantiated service - options exist to forward to Prompt implemented on a Kubernetes cluster or running on a Distributed Cloud AppStack Customer Edge (CE) node. \n Block Sensitive Data with Prompt Security In-Line \n AI inferences, upon reaching Prompt’s security service, are subjected to a wide breadth of security inspections.   Some of the more important categories would include: \n \n Sensitive data leakage, although potentially contained in LLM responses, intuitively the larger proportion of risk is within the requesting prompt, with user perhaps inadvertently disclosing data which should not reach an LLM \n Source code fragments within submissions to LLMs, various programming languages may be scanned for and blocked, and the code may be enterprise intellectual property \n OWASP LLM top 10 high risk violations, such as LLM jailbreaking where the intent is to make the LLM behave and generate content that is not aligned with the service intentions; the goal may be embarrassing “screenshots”, such as having a chatbot for automobile vendor A actually recommend a vehicle from vendor B \n OWASP Prompt Injection detection, considered one of the most dangerous threats as the intention is for rogue users to exfiltrate valuable data from sources the LLM may have privileged access to, such as backend databases \n Token layer attacks, such as unauthorized and excessive use of tokens for LLM tasks, the so-called “Denial of Wallet” threat \n Content moderation, ensuring a safe interaction with LLMs devoid of toxicity, racial and gender discriminatory language and overall curated AI experience aligned with those productivity gains that LLMs promise \n \n To demonstrate sensitive data leakage protection, a Prompt Security policy was active which blocked LLM requests with, among many PII fields, a mailing address exposed.  To reach OpenAI GPT3.5-Turbo, one of the most popular and cost-effective models in the OpenAI model lineup, prompts were sent to an F5 XC HTTPS load balancer at address llmsec.busdevf5.net.   Traffic not violating the comprehensive F5 WAF security rules were proxied to the Prompt Security SaaS offering.  The prompt below clearly involves a mailing address in the data portion. \n \n   \n The ensuing prompt is intercepted by both the F5 and Prompt Security solutions.   The first interception, the distributed HTTPS load balancer offered by F5 offers rich details on the transaction, and since no WAF rules or other security policies are violated, the transaction is forwarded to Prompt Security.   The following demonstrates some of the interesting details surrounding the transaction, when completed (double-click to enlarge). \n \n As highlighted, the transaction was successful at the HTTP layer, producing a 200 Okay outcome.   The traffic originated in the municipality of Ashton, in Canada, and was received into Distributed Cloud in F5’s Toronto (tr2-tor) RE site.   The full details around the targeted URL path, such as the OpenAI /v1/chat/completions target and the user-agent involved, vscode-restclient, are both provided. \n Although the HTTP transaction was successful, the actual AI prompt was rejected, as hoped for, by Prompt Security.   Drilling into the Activity Monitor in the Prompt UI, one can get a detailed verdict on the transaction (double-click). \n \n Following the yellow highlights above, the prompt was blocked, and the violation is “Sensitive Data”.  The specific offending content, the New York City street address, is flagged as a precluded entity type of “mailing address”.   Other fields that might be potentially blocking candidates with Prompt’s solution include various international passports or driver’s license formats, credit card numbers, emails, and IP addresses, to name but a few. \n A nice, time saving feature offered by the Prompt Security user interface is to simply choose an individual security framework of interest, such as GDPR or PCI, and the solution will automatically invoke related sensitive data types to detect. \n \n An important idea to grasp: The solution from Prompt is much more nuanced and advanced than simple REGEX; it invokes the power of AI itself to secure customer journeys into safe AI usage.   Machine learning models, often transformer-based, have been fine-tuned and orchestrated to interpret the overall tone and tenor of prompts, gaining a real semantic understanding of what is being conveyed in the prompt to counteract simple obfuscation attempts.   For instance, using printed numbers, such as one, two, three to circumvent Regex rules predicated on numerals being present - this will not succeed. \n This AI infused ability to interpret context and intent allows for preset industry guidelines for safe LLM enforcement.   For instance, simply indicating the business sector is financial will allow the Prompt Security solution to pass judgement, and block if desired, financial reports, investment strategy documents and revenue audits, to name just a few.   Similar awareness for sectors such as healthcare or insurance is simply a pull-down menu item away with the policy builder. \n Source Code Detection \n A common use case for LLM security solutions is identification and, potentially, blocking submissions of enterprise source code to LLM services.   In this scenario, this small snippet of Python is delivered to the Prompt service: \n def trial():     return 2_500 <= sorted(choices(range(10_000), k=5))[2] < 7_500 sum(trial() for i in range(10_000)) / 10_000 \n   \n A policy is in place for Python and JavaScript detection and was invoked as hoped for. \n   \n curl --request POST \\   --url https://llmsec.busdevf5.net/v1/chat/completions \\   --header 'authorization: Bearer sk-oZU66yhyN7qhUjEHfmR5T3BlbkFJ5RFOI***********' \\   --header 'content-type: application/json' \\   --header 'user-agent: vscode-restclient' \\   --data '{\"model\": \"gpt-3.5-turbo\",\"messages\": [{\"role\": \"user\",\"content\": \"def trial():\\n    return 2_500 <= sorted(choices(range(10_000), k=5))[2] < 7_500\\n\\nsum(trial() for i in range(10_000)) / 10_000\"}]}' \n   \n \n   \n Content Moderation for Interactions with LLMs \n One common manner of preventing LLM responses from veering into undesirable territory is for the service provider to implement a detailed system prompt, a set of guidelines that the LLM should be governed by when responding to user prompts.   For instance, the system prompt might instruct the LLM to serve as polite, helpful and succinct assistant for customers purchasing shoes in an online e-commerce portal.   A request for help involving the trafficking of narcotics should, intuitively, be denied. \n Defense in depth has traditionally meant no single point of failure. In the above scenario, screening both the user prompt and ensuring LLM response for a wide range of topics leads to a more ironclad security outcome.   The following demonstrates some of the topics Prompt Security can intelligently seek out; in this simple example, the topic of “News & Politics” has been singled out to block as a demonstration. \n \n Testing can be performed with this easy Curl command, asking for a prediction on a possible election result in Canadian politics: \n curl --request POST \\   --url https://llmsec.busdevf5.net/v1/chat/completions \\   --header 'authorization: Bearer sk-oZU66yhyN7qhUjEHfmR5T3Blbk*************' \\   --header 'content-type: application/json' \\   --header 'user-agent: vscode-restclient' \\   --data '{\"model\": \"gpt-3.5-turbo\",\"messages\": [{\"role\": \"user\",\"content\": \"Who will win the upcoming Canadian federal election expected in 2025\"}],\"max_tokens\": 250,\"temperature\": 0.7}' \n The response, available in the Prompt Security console, is also presented to the user. In this case, a Curl user leveraging the VSCode IDE.  The response has been largely truncated for brevity, fields that are of interest is an HTTP “X-header” indicating the transaction utilized the F5 site in Toronto, and the number of tokens consumed in the request and response are also included. \n \n   \n Advanced LLM Security Features \n Many of the AI security concerns are given prominence by the OWASP Top Ten for LLMs, an evolving and curated list of potential concerns around LLM usage from subject matter experts.   Among these are prompt injection attacks and malicious instructions often perceived as benign by the LLM.   Prompt Security uses a layered approach to thwart prompt injection.   For instance, during the uptick in interest in ChatGPT, DAN (Do Anything Now) prompt injection was widespread and a very disruptive force, as discussed here. \n User prompts will be closely analyzed for the presence of the various DAN templates that have evolved over the past 18 months.   More significantly, the use of AI itself allows the Prompt solution to recognize zero-day bespoke prompts attempting to conduct mischief.   The interpretative powers of fine-tuned, purpose-built security inspection models are likely the only way to stay one step ahead of bad actors. \n Another chief concern is protection of the system prompt, the guidelines that reel in unwanted behavior of the offered LLM service, what instructed our LLM earlier in its role as a shoe sales assistant.   The system prompt, if somehow manipulated, would be a significant breach in AI security, havoc could be created with an LLM directed astray.   As such, Prompt Security offers a policy to compare the user provided prompt, the configured system prompt in the API call, and the response generated by the LLM.   In the event that a similarity threshold with the system prompt is exceeded in the other fields, the transaction can be immediately blocked. \n An interesting advanced safeguard is the support for a “canary” word - a specific value that a well behaved LLM should never present in any response, ever.   The detection of the canary word by the Prompt solution will raise an immediate alert. \n One particularly broad and powerful feature in the AI firewall is the ability to find secrets, meaning tokens or passwords, frequently for cloud-hosted services, that are revealed within user prompts.   Prompt Security offers the ability to scour LLM traffic for in excess of 200 meaningful values.   Just as a small representative sample of the industry’s breadth of secrets, these can all be detected and acted upon: \n \n Azure Storage Keys Detector \n Artifactory Detector \n Databricks API tokens \n GitLab credentials \n NYTimes Access Tokens \n Atlassian API Tokens \n \n Besides simple blocking, a useful redaction option can be chosen.   Rather than risk compromise of credentials and obfuscated value will instead be seen at the LLM. \n F5 Positive Security Models for AI Endpoints \n The AI traffic delivered and received from Prompt Security’s AI firewall is both discovered and subjected to API layer policies by the F5 load balancer.   Consider the token awareness features of the AI firewall, excessive token consumption can trigger an alert and even transaction blocking.   This behavior, a boon when LLMs like the OpenAI premium GPT-4 models may have substantial costs, allows organizations to automatically shut down a malicious actor who illegitimately got hold of an OPENAI_API key value and bombarded the LLM with prompts.   This is often referred to as a “Denial of Wallet” situation. \n F5 Distributed Cloud, with its focus upon the API layer, has congruent safeguards.   Each unique user of an API service is tracked to monitor transactional consumption.   By setting safeguards for API rate limiting, an excessive load placed upon the API endpoint will result in HTTP 429 “Too Many Request” in response to abusive behavior. \n A key feature of F5 API Security is the fact that it is actionable in both directions, and also an in-line offering, unlike some API solutions which reside out of band and consume proxy logs for reporting and threat detection.   With the automatic discovery of API endpoints, as seen in the following screenshot, the F5 administrator can see the full URL path which in this case exercises the familiar OpenAI /v1/chat/completions endpoint. \n \n As highlighted by the arrow, the schema of traffic to API endpoints is fully downloadable as an OpenAPI Specification (OAS), formerly known as a Swagger file.   This layer of security means fields in API headers and bodies can be validated for syntax, such that a field whose schema expects a floating-point number can see any different encoding, such as a string, blocked in real-time in either direction. \n A possible and valuable use case:  allow an initial unfettered access to a service such as OpenAI, by means of Prompt Security’s AI firewall service, for a matter of perhaps 48 hours.   After a baseline of API endpoints has been observed, the API definition can be loaded from any saved Swagger files at the end of this “observation” period.  The loaded version can be fully pruned of undesirable or disallowed endpoints, all future traffic must conform or be dropped. \n This is an example of a “positive security model”, considered a gold standard by many risk-adverse organizations.   Simply put, a positive security model allows what has been agreed upon through and rejects everything else.   This ability to learn and review your own traffic, and then only present Prompt Security with LLM endpoints that an organization wants exposed is an interesting example of complementing an AI security solution with rich API layer features. \n Summary \n The world of AI and LLMs is rapidly seeing investment, in time and money, from virtually all economic sectors; the promise of rapid dividends in the knowledge economy is hard to resist.   As with any rapid deployment of new technology, safe consumption is not guaranteed, and it is not built in.   Although LLMs often suggest guardrails are baked into offerings, a 30-second search of the Internet will expose firsthand experiences where unexpected outcomes when invoking AI are real.  Brand reputation is at stake and false information can be hallucinated or coerced out of LLMs by determined parties. \n By combining the ability to ingest globally at high-speed dispersed users and apply a first level of security protections, F5 Distributed Cloud can be leveraged as an onboarding for LLM workloads.   As depicted in this article, Prompt Security can in turn handle traffic egressing F5’s distributed HTTPS load balancers and provide state-of-the-art AI safeguards, including sensitive data detection, content moderation and other OWASP-aligned mechanisms like jailbreak and prompt injection mitigation.   Other deployment models exist, including deploying Prompt Security’s solution on-premises, self-hosted in cloud tenants, and running the solution on Distributed Cloud CE nodes themselves is supported. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"21434","kudosSumWeight":3,"repliesCount":0,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtZ0puYUhr?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtQldZYlhF?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDM","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtancxZEtv?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDQ","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtTUZsY3hJ?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDU","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtY1BiVjVT?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDY","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtRktQNWpZ?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDc","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtRm1ERGdG?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDg","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtOXBOcEh5?revision=6\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDk","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMzA3MzgtMUJkRktr?revision=6\"}"}}],"totalCount":9,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"Conversation:conversation:328906":{"__typename":"Conversation","id":"conversation:328906","topic":{"__typename":"TkbTopicMessage","uid":328906},"lastPostingActivityTime":"2024-06-03T22:28:03.099-07:00","solved":false},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtWnh3QmRT?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtWnh3QmRT?revision=7","title":"image_003_redone.png","associationType":"BODY","width":2897,"height":1003,"altText":"null"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtZUZhaHFn?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtZUZhaHFn?revision=7","title":"image_001_redone.png","associationType":"BODY","width":3213,"height":1910,"altText":"null"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtbVNreFdO?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtbVNreFdO?revision=7","title":"Image_002_touch_up.png","associationType":"BODY","width":2822,"height":1704,"altText":"null"},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYta1RobnM0?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYta1RobnM0?revision=7","title":"SG_4.jpg","associationType":"BODY","width":1368,"height":897,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtd3BzRVhY?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtd3BzRVhY?revision=7","title":"SG_5.jpg","associationType":"BODY","width":1283,"height":801,"altText":""},"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtYkwxckFI?revision=7\"}":{"__typename":"AssociatedImage","url":"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtYkwxckFI?revision=7","title":"SG_6.jpg","associationType":"BODY","width":1283,"height":761,"altText":""},"TkbTopicMessage:message:328906":{"__typename":"TkbTopicMessage","subject":"Secure RAG for Safe AI Deployments Using F5 Distributed Cloud and NetApp ONTAP","conversation":{"__ref":"Conversation:conversation:328906"},"id":"message:328906","revisionNum":7,"uid":328906,"depth":0,"board":{"__ref":"Tkb:board:TechnicalArticles"},"author":{"__ref":"User:user:405306"},"teaser@stripHtml({\"removeProcessingText\":true,\"truncateLength\":-1})":"","introduction":"","metrics":{"__typename":"MessageMetrics","views":1385},"postTime":"2024-04-01T11:53:25.916-07:00","lastPublishTime":"2024-06-03T22:28:03.099-07:00","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})":" Retrieval Augmented Generation (RAG) is one of the most discussed techniques to empower Large Language Models (LLM) to deliver niche, hyper-focused responses pertaining to specialized, sometimes proprietary, bodies of knowledge documents. Two simple examples might include highly detailed company-specific information distilled from years of financial internal reporting from financial controllers or helpdesk type queries with the LLM harvesting only relevant knowledge base (KB) articles, releases notes, and private engineering documents not normally exposed in their entirety. \n RAG is highly bantered about in numerous good articles; the two principal values are: \n \n LLM responses to prompts (queries) based upon specific, niche knowledge as opposed to the general, vast pre-training generic LLMs are taught with; in fact, it is common to instruct LLMs not to answer specifically with any pre-trained knowledge. Only the content “augmenting” the prompt. \n Attribution is a key deliverable with RAG. Generally LLM pre-trained knowledge inquiries are difficult to traceback to a root source of truth. Prompts augmented with specific assistive knowledge normally solicit responses that clearly call out the source of the answers provided. \n \n Why is the Security of RAG Source Content Particularly Important? \n To maximize the efficacy of LLM solutions in the realm of artificial intelligence (AI) an often-repeated adage is “garbage in, garbage out” which succinctly states an obvious fact with RAG: valuable and actionable items must be entered into the model to expect valuable, tactical outcomes. This means exposing key forms of data, examples being data which might include patented knowledge, intellectual property not to be exposed in raw form to competitors. Actual trade secrets, which will infuse the LLM but need to remain confidential in their native form. \n In one example around trade secrets, the Government of Canada spells out a series of items courts will look at in determining compensation for misuse (theft) of intellectual property. It is notable that the first item listed is not the cost associated with creation of the secret material (“the cost in money or time of creating or developing the information”) but rather the very first item is instead how much effort was made to keep the content secure (“the measures taken to maintain secrecy”). \n With RAG, incoming queries are augmented with rich, semantically similar enterprise content. The content has already been populated into a vector database by converting documents, they might be pdf or docx as examples, into raw text form and converting chunks of text into vectors. The vectors are long sequences of numbers with similar mathematical attributes for similar content. As a trivial example, one-word chunks such as glass, cup, bucket, jar might be semantically related, meaning similarities can be construed by both human minds and LLMs. On the other hand, empathy, joy, and thoughtfulness maintain similarities of their own. This semantic approach means a phrase/sentence/paragraph (chunk) using bow to mean “to bend in respect” will be highly distinct from chunks referring to the “front end of a ship\" or “something to tie one’s hair back with”, even a tool every violinist would need. The list goes on; all semantic meanings of bow are very different in these chunks and would have distinctive embeddings within a vector database. The word embedding is likely derived from “fixing” or “planting” an object. In this case, words are “embedded” into a contextual understanding. \n The typical length of the number sequence describing the meaning of items has typically been more than 700, but this number of “dimensions” applied is always a matter of research, and the entire vector database is arrived at with an embedding LLM, distinct from the main LLM that will produce generative AI responses to our queries. \n Incoming queries destined for the main generative AI LLM can, in turn, be converted to vectors themselves by the very same text-embedding “helper” LLM and through retrieval (the “R” in RAG) similar textual content can buttress the prompt presented to the main LLM (double click to expand). \n \n Since a critical cog in the wheel of the RAG architecture is the ingestion of valuable and sensitive source documents into the vector database, using the embedding LLM, it is not just prudent but critical that this source content be brought securely over networks to the embedding engine. \n F5 Distributed Cloud Secure Multicloud Networking and NetApp ONTAP \n For many practical, time-to-market reasons, modern LLMs, both the main and embedding instances, may not be collocated with the data vaults of modern enterprises. LLMs benefit from cloud compute and GPU access, something often in short supply for on-premises production roll outs. A typical approach assisted by the economies of scale might be to harvest public cloud providers, such as Azure, AWS, and Google Cloud Platform for the compute side of AI projects. Azure, as one example, can turn up virtual machines with GPUs from NVIDIA like A100, A2, and Tesla T4 to name a few. \n The documents needed to feed an effective RAG solution may well be on-premises, and this is unlikely to change for reasons including governance, regulatory, and the weight of decades of sound security practice. One of the leading on-premises storage solutions of the last 25 years is the NetApp ONTAP storage appliance family, and reflected in this quote from NVIDIA: \n \"Nearly half of the files in the world are stored on-prem on NetApp.\" — Jensen Huang, CEO of NVIDIA \n A key deliverable of F5 Distributed Cloud is providing encrypted interconnectivity of disparate physical sites and heterogeneous cloud instances such as Azure VNETs or AWS VPCs. As such, there are two immediate, concurrent F5 features that come to mind: \n \n Secure interconnectivity of on-premises NetApp volumes (NAS) or LUNs (Block) containing critical documents for ingestion into RAG. Utilize encrypted L3 connectivity between the enterprise location and the cloud instance where the LLM/RAG are instantiated. TCP load balancers are another alternative for volume sharing NAS protocols like NFS or SMB/CIFS. \n Secure access to the LLM web interface or RESTful API end points, with HTTPS load balancers including key features like WAF, anti-bot mechanisms, and API automatic rate limiting for abusive prompt sources. \n \n The following diagram presents the topology this article set out to create, REs are “regional edge” sites maintained internationally by F5 and harness private RE to RE, high-speed global communication links. DNS names, such as the target name of an LLM service, will leverage mappings to anycast IP addresses, thus users entering the RE network from southeast Asian might, for example, enter the Singapore RE while users in Switzerland might enter via a Paris or Frankfurt RE. \n \n Complementing the REs are Customer Edge (CE) nodes. These are virtual or physical appliances which act as security demarcation points. For instance, a CE placed in an Azure VNET can protect access to the server supporting the LLM, removing any need for Internet access to the server, which is now entirely accessible only through a private RFC-1918 type of private address. External access to the LLM for just employees or, maybe employees and contractors, or potentially access for the Internet community is enabled by a distributed HTTPS load balancer. \n In the example depicted above, oriented towards full Internet access, the FQDN of the LLM is projected by the load balancer into the global DNS and consumers of the service resolve the name to one IP address and are attracted to the closest RE by BGP-4’s support for anycast. As the name “distributed” load balancer suggests, the origin pool can be in an entirely different site than the incoming RE, in this case the origin pool is the LLM behind the CE in the Azure VNET. The LLM requests travel from RE to CE via a highspeed networking underlay. \n The portion of the solution that securely ties the LLM to the source content required for RAG to embed vectors is, in this case, utilizing layer 3 multicloud networking (MCN). The solution is turnkey, routing table are automatically connected to members of the L3 MCN, in this case the inside interfaces of the Azure CE and Redmond, Washington on-premises CE and traffic flows over an encrypted underlay network. As such, the NetApp ONTAP cluster can securely expose volumes with key file ware via a protocol like Network File System (NFS), no risk of data exposure to third-party prying eyes exists. The following diagram drills into the RE and CE and NetApp interplay (double click to expand). \n \n F5 Distributed Cloud App Connect and LLM Setup \n This article speaks to hands-on experience with web-driven LLM inferencing with augmented prompts derived from a RAG implementation. The AI compute was instantiated on an Azure-hosted Ubuntu 20.04 virtual machine with 4 virtual cores. Installed software included Python 3.10, and libraries such as Langchain, Pypdf (for converting pdf documents to text), FAISS (for similarity searching via a vector database), and other libraries. The actual open source LLM utilized for the generative AI is found here on huggingface.co. The binary, which exceeds 4 GB, is considered effective for CPU-based deployments. \n The embedding LLM model, critical to seed the vector database with entries derived from secured enterprise documentation, and then used again per incoming query for RAG similarity searches to build augmented prompts, was from Hugging Face: sentence-transformers/all-MiniLM-L6-v2 and can be found here. \n The AI RAG solution was implemented in Python3, and as such the Azure Ubuntu can be accessed both by SSH or via Jupyter Notebooks. The latter was utilized as this is the preferred final delivery mechanism for standard users, not a web chatbot design or the requirement to use API commands through solutions like Postman or Curl. This design choice, to steer the user experience towards Jupyter Notebook consumption, is in keeping with the fact that it has become a standard in AI LLM usage where the LLM is tactical and vital to an enterprise's lines of business (LOBs). Jupyter Notebooks are web-accessed with a browser like Chrome or Edge and as such, F5’s WAF, anti-bot, and L7 DDoS, all part of the F5 WAAP offering, can easily be laid upon an HTTP load balancer with a few mouse clicks in XC to provide premium security to the user experience. \n NetApp and F5 Distributed Cloud Secure Multicloud Networking \n The secure access to files for ingestion into the vector database, for similarity searches when user queries are received, makes use of an encrypted L3 Multicloud Network relationship between the Azure VNET and the LAN on prem in Redmond, Washington hosting the NetApp ONTAP cluster. \n The specific protocol chosen was NFS and the simplicity is demonstrated by the use of just one Linux command to present key, high-valued documents for the AI to populate the database: \n #mount -t nfs <IP Address of NetAPP LIF interface on-prem>:/Secure_docs_for_RAG /home/ubuntu_restriced_user/rag_project/docs/Secure_docs_for_RAG. \n This address is available nowhere else in the world except behind this F5 CE in the Azure VNET. \n After the pdf files are converted to text, chunked to reasonable sizes with some overlap suggested between the end of one chunk and the start of the next chunk, the embedding LLM will populate the vector database. The files are always only accessed remotely by NFS through the mounted volume, and this mount may be terminated until new documents are ready to be added to the solution. \n The Objective RAG Implementation - Described \n In order to have a reasonable facsimile of the real-word use cases this solution will empower today, but not having any sensitive documents to be injected, it was decided to use some seminal “Internet Boom”-era IETF Requests for Comment (RFCs) as source content. With the rise of multi-port routing and switching devices, it became apparent the industry badly needed specific and highly precise definitions around network device (router and switch) performance benchmarking to allow purchasers “apples-to-apples” comparisons. These documents recommend testing parameters, such as what frame or packet sizes to test with, test iteration time lengths, when to use FIFO vs LIFO vs LILO definitions of latency, etc. RFC-1242 (Request for Comment, terminology) and RFC-2544 (methodologies), chaired by Scott Bradner of Harvard University, and the later RFC 2285 (LAN switching terminologies), chaired by Bob Mandeville then of European Network Laboratories are three prominent examples, to which test and measurement solutions aspired to be compliant. \n Detailed LLM answers for quality assurance engineers in the network equipment manufacturing (NEM) space is the intended use case of the design, answers that must be distilled specifically by generative AI considering queries augmented by RAG and specifically only based upon these industry-approved documents. These documents are, of course, not containing trade secrets or patented engineering designs. They are in fact publicly available from the IETF, however they are nicely representative of the value offered in sensitive environments. \n Validating RAG – Watching the Context Provided to the LLM \n To ensure RAG was working, the content being augmented in the prompt was displayed to screen, we would expect to see relevant clauses and sentences from the RFCs being provided to the generative AI LLM. Also, if we were to start by asking questions that were outside the purview of this testing/benchmarking topic, we should see the LLM struggle to provide users a meaningful answer. \n To achieve this, rather than, say, asking what 802.3/Ethernetv2 frame sizes should be used in throughput measurements, and what precisely is the industry standard definition of the term “throughput” was, the question instead pertained to a recent Netflix release, featuring Lindsay Lohan. Due to the recency of the film, even if the LLM leaned upon its pretrained model, it will come up with nothing meaningful. \n “Question: Important, only use information provided as context in the prompt, do not use other trained knowledge. Please identify who played Heather in the March 2024 Lindsay Lohan Netflix movie titled Irish Wish?” \n As seen in the following Jupyter screenshot, the RAG solution can only provide augmented prompts from the database, in this case it has some test and measurement clauses and some rules pertaining to the winter ice sport of curling (double click to enlarge). \n \n The supportive context data augmenting the prompt was, as expected, not going to help in this off-the-wall query, as only the RFCs and some sports rules had been introduced to the vector database. What is especially noteworthy is the LLM did not simply admit it could not possibly answer this question but hallucinated a very articulate response that indicated “Heather played for her team against Lindsay Lohan’s Team Ireland”. \n To validate the RAG architecture for a working scenario, actually pertinent to the movie, a film review in pdf format was added to the ONTAP cluster-housed volume. The supportive documentation was consumed (using Pypdf from the Jupyter Notebook it became text). It was automatically chunked and introduced to the vector database through the Hugging Face embedding LLM. \n With the RAG vector database populated by the embedding LLM, the responses then became meaningful. The following exchange shows the value of RAG in producing meaningful, thought-out answers: \n Question: Important, only use information provided as context in the prompt, do not use other pre-trained knowledge. Who does Maddie edit novels for in the Lindsay Lohan March 2024 Netflix movie titled Irish Wish? \n Answer: Let's think step by step. We know that Maddie Kelly plays a book editor who harbors a big secret, and we have been given information about her job as an editor for Paul Kennedy (played by Alexander Vlahos), which is also part of the question context provided above. Therefore it seems logical to infer that she edits novels in general but specifically those written or co-written by author Paul Kennedy - who happens to be a bestselling romance novelist as stated earlier on, so we can safely conclude Maddie works for Paul himself (as opposed to any other book publishing company). \n This time we see relevant, meaningful context provided by RAG, with relevant portions of the pdf file review document augmenting the prompt. The augmentations are normally not displayed unless requested by the user or programmer, unlike humans, LLMs do not require superfluous text formatting (advantage: LLMs) and thus the content is packed tightly and efficiently. Fewer characters also mean fewer tokens get used by the targeted AI model and can allow more data before a token context limit is reached. In pay-for-use LLM approaches fewer tokens also help the enterprise’s bottom line financially. \n Also, note that the answer will likely not always be identical with subsequent asks of the same question as per LLM normal behavior. Features like “temperature setting” can also allow more “creative” ideas in responses, injecting humor and even outlandishness if desired. \n The RAG workflow is now validated, but the LLMs in question (embedding and main generative LLM) can still be made better with these suggestions: \n \n Increase “chunk” sizes so ideas are not lost when excessive breaks make for short chunks. \n Increase “overlap” so an idea/concept is not lost at the demarcation point of two chunks. \n Most importantly, provide more context from the vector database as context lengths (maximum tokens in a request/response) are generally increasing in size. Llama2, for instance, typically has a 4,096 context length but can now be used with larger values, such as 32,768. This article used only 3 augmentations to the user query, better results could be attained by increasing this value at a potential cost of more CPU cycles. \n \n Using Secure RAG – F5 L3 MCN, HTTPS Load Balancers and NetApp ONTAP Together \n With the RAG architecture validated to be working, the solution was used to assist the target user entering queries to the Azure server by means of Jupyter Notebooks, with RAG documents ingested over encrypted, private networking to the on-premises ONTAP cluster NFS volumes. \n The questions posed, which are answerable by reading and understanding key portions spread throughout the Scott Bradner RFCs, was: \n “Important, only use information provided as context in the prompt, do not use other pre-trained knowledge. Please explain the specific definition of throughput? What 802.3 frame sizes should be used for benchmarking? How long should each test iteration last? If you cannot answer the questions exclusively with the details included in the prompt, simply say you are unable to answer the question accurately. Thank you.\" \n The Jupyter Notebook representation of this query, which is made in the Python language and issued from the user’s local browser anywhere in the world and directly against the Azure-hosted LLM, looks like the following (click to expand image): \n \n The next screenshot demonstrates the result, based upon the provided secure documents (double click to expand). \n \n The response is decent, however, the fact that it is clearly using the provided augmentations to the prompt, that is the key objective of this article. \n The accuracy of the response can be questionable in some areas, the Bradner RFCs highlighted the importance of 64-byte 802.3/Ethernetv2 frame sizes in testing, as line rate forwarding with this minimum size produces the highest theoretically possible frame per second load. In the era of software driven forwarding in switches and routers this was very demanding. Sixty-four byte frames result in 14,881 fps (frames per second) for 10BaseT, 148,809 fps for 100BaseT, 1.48 million fps for Gigabit Ethernet. These values were frequently more aspirational in earlier times and also a frequent metric used in network equipment purchasing cycles. \n Suspiciously, the LLM response calls out 64kB in 802.3 testing, not 64B, something which seems to be an error. Again, with this architecture, the actual LLM providing the generative AI responses is increasingly viewed as a commodity, alternative LLMs can be plugged quickly and easily into the RAG approach of this Jupyter Notebook.  \n The end user, and thus the enterprise itself, is empowered to utilize different LLMs, purchased or open-source from sites like Hugging Face, to determine optimal results. \n The other key change that can affect the overall accuracy of results is to experiment with different embedding models. In fact, there are on-line “leader” boards strictly for embedding LLMs so one can quickly swap in and out various popular embedding LLMs to see the impact on results. \n Summary and Conclusions on F5 and NetApp as Enablers for Secure RAG \n This article demonstrated an approach to AI usage that leveraged the compute and GPU availability that can be found today within cloud providers such as Azure. To safely access such an AI platform for a production-grade enterprise requirement, F5 Distributed Cloud (XC) provided HTTPS load balancers to connect worker browsers to a Jupyter Notebook service on the AI platform, this service applies advanced security upon the traffic within the XC, from WAF to anti-bot to L3/L7 DDOS protections. \n Utilizing secure Multicloud Networking (MCN), F5 provided a private L3 connectivity service between the inside interface on an Azure VNET-based CE (customer edge) node and the inside interface of an on-premises CE node in a building in Redmond, Washington. This secure network facilitated an NFS remote volume, content on spindles/flash in on-premises NetApp ONTAP to be remotely mounted on the Azure server. This secure file access provided peace of mind to exposing potentially critical and private materials from NetApp ONTAP volumes to the AI offering. \n RAG was configured and files were ingested, populating a vector database within the Azure server, that allowed details, ideas, and recommendations to be harnessed by a generative AI LLM by augmenting user prompts with text gleaned from the vector database. Simple examples were used to first demonstrate that RAG was working by posing queries that should not have been addressed by the loaded secure content; such a query was not suitably answered as expected. The feeding of meaningful content from ONTAP was then demonstrated to unleash the potential of AI to address queries based upon meaningful .pdf files. Opportunities to improve results by swapping in and out the main generative AI model, as well as the embedding model, were also considered. ","body@stripHtml({\"removeProcessingText\":true,\"removeSpoilerMarkup\":true,\"removeTocMarkup\":true,\"truncateLength\":-1})@stringLength":"22909","kudosSumWeight":3,"repliesCount":0,"readOnly":false,"images":{"__typename":"AssociatedImageConnection","edges":[{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDE","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtWnh3QmRT?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDI","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtZUZhaHFn?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDM","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtbVNreFdO?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDQ","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYta1RobnM0?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDU","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtd3BzRVhY?revision=7\"}"}},{"__typename":"AssociatedImageEdge","cursor":"MjUuMnwyLjF8b3wyNXxfTlZffDY","node":{"__ref":"AssociatedImage:{\"url\":\"https://community.f5.com/t5/s/zihoc95639/images/bS0zMjg5MDYtYkwxckFI?revision=7\"}"}}],"totalCount":6,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}},"videos":{"__typename":"VideoConnection","edges":[],"totalCount":0,"pageInfo":{"__typename":"PageInfo","hasNextPage":false,"endCursor":null,"hasPreviousPage":false,"startCursor":null}}},"CachedAsset:text:en_US-components/community/Navbar-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/community/Navbar-1743097581414","value":{"community":"Community Home","inbox":"Inbox","manageContent":"Manage Content","tos":"Terms of Service","forgotPassword":"Forgot Password","themeEditor":"Theme Editor","edit":"Edit Navigation Bar","skipContent":"Skip to content","migrated-link-9":"Groups","migrated-link-7":"Technical Articles","migrated-link-8":"DevCentral News","migrated-link-1":"Technical Forum","migrated-link-10":"Community Groups","migrated-link-2":"Water Cooler","migrated-link-11":"F5 Groups","Common-external-link":"How Do I...?","migrated-link-0":"Forums","article-series":"Article Series","migrated-link-5":"Community Articles","migrated-link-6":"Articles","security-insights":"Security Insights","migrated-link-3":"CrowdSRC","migrated-link-4":"CodeShare","migrated-link-12":"Events","migrated-link-13":"Suggestions"},"localOverride":false},"CachedAsset:text:en_US-components/community/NavbarHamburgerDropdown-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/community/NavbarHamburgerDropdown-1743097581414","value":{"hamburgerLabel":"Side Menu"},"localOverride":false},"CachedAsset:text:en_US-components/community/BrandLogo-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/community/BrandLogo-1743097581414","value":{"logoAlt":"Khoros","themeLogoAlt":"Brand Logo"},"localOverride":false},"CachedAsset:text:en_US-components/community/NavbarTextLinks-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/community/NavbarTextLinks-1743097581414","value":{"more":"More"},"localOverride":false},"CachedAsset:text:en_US-components/authentication/AuthenticationLink-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/authentication/AuthenticationLink-1743097581414","value":{"title.login":"Sign In","title.registration":"Register","title.forgotPassword":"Forgot Password","title.multiAuthLogin":"Sign In"},"localOverride":false},"CachedAsset:text:en_US-components/nodes/NodeLink-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/nodes/NodeLink-1743097581414","value":{"place":"Place {name}"},"localOverride":false},"CachedAsset:text:en_US-components/tags/TagSubscriptionAction-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/tags/TagSubscriptionAction-1743097581414","value":{"success.follow.title":"Following Tag","success.unfollow.title":"Unfollowed Tag","success.follow.message.followAcrossCommunity":"You will be notified when this tag is used anywhere across the community","success.unfollowtag.message":"You will no longer be notified when this tag is used anywhere in this place","success.unfollowtagAcrossCommunity.message":"You will no longer be notified when this tag is used anywhere across the community","unexpected.error.title":"Error - Action Failed","unexpected.error.message":"An unidentified problem occurred during the action you took. Please try again later.","buttonTitle":"{isSubscribed, select, true {Unfollow} false {Follow} other{}}","unfollow":"Unfollow"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageListTabs-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageListTabs-1743097581414","value":{"mostKudoed":"{value, select, IDEA {Most Votes} other {Most Likes}}","mostReplies":"Most Replies","mostViewed":"Most Viewed","newest":"{value, select, IDEA {Newest Ideas} OCCASION {Newest Events} other {Newest Topics}}","newestOccasions":"Newest Events","mostRecent":"Most Recent","noReplies":"No Replies Yet","noSolutions":"No Solutions Yet","solutions":"Solutions","mostRecentUserContent":"Most Recent","trending":"Trending","draft":"Drafts","spam":"Spam","abuse":"Abuse","moderation":"Moderation","tags":"Tags","PAST":"Past","UPCOMING":"Upcoming","sortBymostRecent":"Sort By Most Recent","sortBymostRecentUserContent":"Sort By Most Recent","sortBymostKudoed":"Sort By Most Likes","sortBymostReplies":"Sort By Most Replies","sortBymostViewed":"Sort By Most Viewed","sortBynewest":"Sort By Newest Topics","sortBynewestOccasions":"Sort By Newest Events","otherTabs":" Messages list in the {tab} for {conversationStyle}","guides":"Guides","archives":"Archives"},"localOverride":false},"CachedAsset:text:en_US-shared/client/components/common/QueryHandler-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-shared/client/components/common/QueryHandler-1743097581414","value":{"title":"Query Handler"},"localOverride":false},"CachedAsset:text:en_US-components/community/NavbarDropdownToggle-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/community/NavbarDropdownToggle-1743097581414","value":{"ariaLabelClosed":"Press the down arrow to open the menu"},"localOverride":false},"CachedAsset:text:en_US-shared/client/components/common/OverflowNav-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-shared/client/components/common/OverflowNav-1743097581414","value":{"toggleText":"More"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageView/MessageViewInline-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageView/MessageViewInline-1743097581414","value":{"bylineAuthor":"{bylineAuthor}","bylineBoard":"{bylineBoard}","anonymous":"Anonymous","place":"Place {bylineBoard}","gotoParent":"Go to parent {name}"},"localOverride":false},"CachedAsset:text:en_US-components/customComponent/CustomComponent-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/customComponent/CustomComponent-1743097581414","value":{"errorMessage":"Error rendering component id: {customComponentId}","bannerTitle":"Video provider requires cookies to play the video. Accept to continue or {url} it directly on the provider's site.","buttonTitle":"Accept","urlText":"watch"},"localOverride":false},"CachedAsset:text:en_US-components/users/UserLink-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/users/UserLink-1743097581414","value":{"authorName":"View Profile: {author}","anonymous":"Anonymous"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageSubject-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageSubject-1743097581414","value":{"noSubject":"(no subject)"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageBody-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageBody-1743097581414","value":{"showMessageBody":"Show More","mentionsErrorTitle":"{mentionsType, select, board {Board} user {User} message {Message} other {}} No Longer Available","mentionsErrorMessage":"The {mentionsType} you are trying to view has been removed from the community.","videoProcessing":"Video is being processed. Please try again in a few minutes.","bannerTitle":"Video provider requires cookies to play the video. Accept to continue or {url} it directly on the provider's site.","buttonTitle":"Accept","urlText":"watch"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageTime-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageTime-1743097581414","value":{"postTime":"Published: {time}","lastPublishTime":"Last Update: {time}","conversation.lastPostingActivityTime":"Last posting activity time: {time}","conversation.lastPostTime":"Last post time: {time}","moderationData.rejectTime":"Rejected time: {time}"},"localOverride":false},"CachedAsset:text:en_US-shared/client/components/nodes/NodeIcon-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-shared/client/components/nodes/NodeIcon-1743097581414","value":{"contentType":"Content Type {style, select, FORUM {Forum} BLOG {Blog} TKB {Knowledge Base} IDEA {Ideas} OCCASION {Events} other {}} icon"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageUnreadCount-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageUnreadCount-1743097581414","value":{"unread":"{count} unread","comments":"{count, plural, one { unread comment} other{ unread comments}}"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageViewCount-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageViewCount-1743097581414","value":{"textTitle":"{count, plural,one {View} other{Views}}","views":"{count, plural, one{View} other{Views}}"},"localOverride":false},"CachedAsset:text:en_US-components/kudos/KudosCount-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/kudos/KudosCount-1743097581414","value":{"textTitle":"{count, plural,one {{messageType, select, IDEA{Vote} other{Like}}} other{{messageType, select, IDEA{Votes} other{Likes}}}}","likes":"{count, plural, one{like} other{likes}}"},"localOverride":false},"CachedAsset:text:en_US-components/messages/MessageRepliesCount-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-components/messages/MessageRepliesCount-1743097581414","value":{"textTitle":"{count, plural,one {{conversationStyle, select, IDEA{Comment} OCCASION{Comment} other{Reply}}} other{{conversationStyle, select, IDEA{Comments} OCCASION{Comments} other{Replies}}}}","comments":"{count, plural, one{Comment} other{Comments}}"},"localOverride":false},"CachedAsset:text:en_US-shared/client/components/users/UserAvatar-1743097581414":{"__typename":"CachedAsset","id":"text:en_US-shared/client/components/users/UserAvatar-1743097581414","value":{"altText":"{login}'s avatar","altTextGeneric":"User's avatar"},"localOverride":false}}}},"page":"/tags/TagPage/TagPage","query":{"nodeId":"board:TechnicalArticles","tagName":"LLM"},"buildId":"q_bLpq2mflH0BeZigxpj6","runtimeConfig":{"buildInformationVisible":false,"logLevelApp":"info","logLevelMetrics":"info","openTelemetryClientEnabled":false,"openTelemetryConfigName":"f5","openTelemetryServiceVersion":"25.2.0","openTelemetryUniverse":"prod","openTelemetryCollector":"http://localhost:4318","openTelemetryRouteChangeAllowedTime":"5000","apolloDevToolsEnabled":false,"inboxMuteWipFeatureEnabled":false},"isFallback":false,"isExperimentalCompile":false,"dynamicIds":["./components/customComponent/CustomComponent/CustomComponent.tsx","./components/community/Navbar/NavbarWidget.tsx","./components/community/Breadcrumb/BreadcrumbWidget.tsx","./components/tags/TagsHeaderWidget/TagsHeaderWidget.tsx","./components/messages/MessageListForNodeByRecentActivityWidget/MessageListForNodeByRecentActivityWidget.tsx","./components/tags/TagSubscriptionAction/TagSubscriptionAction.tsx","./components/customComponent/CustomComponentContent/TemplateContent.tsx","../shared/client/components/common/List/ListGroup/ListGroup.tsx","./components/messages/MessageView/MessageView.tsx","./components/messages/MessageView/MessageViewInline/MessageViewInline.tsx","./components/customComponent/CustomComponentContent/HtmlContent.tsx","./components/customComponent/CustomComponentContent/CustomComponentScripts.tsx"],"appGip":true,"scriptLoader":[]}