diff --git a/common/constants/getting_started_routes.ts b/common/constants/getting_started_routes.ts
new file mode 100644
index 000000000..7ecc233c6
--- /dev/null
+++ b/common/constants/getting_started_routes.ts
@@ -0,0 +1,43 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+export type TutorialId =
+  | 'otelLogs'
+  | 'otelMetrics'
+  | 'otelTraces'
+  | 'nginx'
+  | 'java'
+  | 'python'
+  | 'golang';
+
+export const COMPONENT_MAP: Record<TutorialId, string> = {
+  otelLogs: 'otel-index-patterns',
+  otelMetrics: 'otel-index-patterns',
+  otelTraces: 'otel-index-patterns',
+  nginx: 'nginx',
+  java: 'java-tutorial',
+  python: 'python-tutorial',
+  golang: 'golang-tutorial',
+};
+
+export const VERSION_MAP: Record<TutorialId, string> = {
+  otelLogs: '1.0.0',
+  otelMetrics: '1.0.0',
+  otelTraces: '1.0.0',
+  nginx: '1.0.0',
+  java: '1.0.0',
+  python: '1.0.0',
+  golang: '1.0.0',
+};
+
+export const SIGNAL_MAP: Record<TutorialId, string> = {
+  otelLogs: 'Logs',
+  otelMetrics: 'Metrics',
+  otelTraces: 'Traces',
+  nginx: '',
+  java: '',
+  python: '',
+  golang: '',
+};
diff --git a/public/components/getting_started/components/getting_started.tsx b/public/components/getting_started/components/getting_started.tsx
index 5ebf856fc..944a21184 100644
--- a/public/components/getting_started/components/getting_started.tsx
+++ b/public/components/getting_started/components/getting_started.tsx
@@ -3,11 +3,10 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-import { EuiPage, EuiPageBody, EuiSpacer } from '@elastic/eui';
+import { EuiPage, EuiPageBody } from '@elastic/eui';
 import React, { useEffect, useState } from 'react';
 import { HomeProps } from 'public/components/getting_started/home';
 import { CollectAndShipData } from './getting_started_collectData';
-import { QueryAndAnalyze } from './getting_started_queryAndAnalyze';
 import { observabilityGettingStartedTitle } from '../../../../common/constants/shared';
 
 interface ExtendedHomeProps extends HomeProps {
@@ -17,10 +16,7 @@ interface ExtendedHomeProps extends HomeProps {
 
 export const NewGettingStarted = (props: ExtendedHomeProps) => {
   const { chrome, selectedDataSourceId, selectedDataSourceLabel } = props;
-  const [selectedSource, setSelectedSource] = useState('');
   const [isPickYourSourceOpen, setIsPickYourSourceOpen] = useState(true);
-  const [isQueryDataOpen, setIsQueryDataOpen] = useState(false);
-  const [isSampleDataset, setIsSampleDataset] = useState(false);
 
   useEffect(() => {
     chrome.setBreadcrumbs([
@@ -31,28 +27,8 @@ export const NewGettingStarted = (props: ExtendedHomeProps) => {
     ]);
   }, [chrome]);
 
-  const handleSelectSource = (source: string) => {
-    setSelectedSource(source);
-  };
-
   const togglePickYourSource = (isOpen: boolean) => {
     setIsPickYourSourceOpen(isOpen);
-    if (isOpen) {
-      setIsQueryDataOpen(false);
-    }
-  };
-
-  const toggleQueryData = (isOpen: boolean) => {
-    setIsQueryDataOpen(isOpen);
-  };
-
-  const setQueryDataOpen = () => {
-    setIsPickYourSourceOpen(false);
-    setIsQueryDataOpen(true);
-  };
-
-  const handleCardSelectionChange = (isSample: boolean) => {
-    setIsSampleDataset(isSample);
   };
 
   return (
@@ -61,23 +37,9 @@ export const NewGettingStarted = (props: ExtendedHomeProps) => {
- - {!isSampleDataset && ( - )} );
diff --git a/public/components/getting_started/components/getting_started_collectData.tsx b/public/components/getting_started/components/getting_started_collectData.tsx
index 0d1de005a..0028486cc 100644
--- a/public/components/getting_started/components/getting_started_collectData.tsx
+++ 
b/public/components/getting_started/components/getting_started_collectData.tsx @@ -4,8 +4,6 @@ */ import { - EuiAccordion, - EuiButton, EuiCheckableCard, EuiCodeBlock, EuiFlexGroup, @@ -14,36 +12,42 @@ import { EuiListGroup, EuiListGroupItem, EuiPanel, - EuiSelectable, EuiSpacer, EuiSteps, EuiText, EuiTitle, EuiTabbedContent, + EuiCompressedComboBox, + EuiButton, + EuiIcon, + EuiCard, } from '@elastic/eui'; import React, { useEffect, useState } from 'react'; -import csvFileJson from '../getting_started_artifacts/csv_file/csv_file-1.0.0.json'; import golangClientJson from '../getting_started_artifacts/golang_client/golang_client-1.0.0.json'; -import otelJson from '../getting_started_artifacts/otel-services/otel-services-1.0.0.json'; +import golangIcon from '../getting_started_artifacts/golang_client/static/logo.svg'; +import otelJsonLogs from '../getting_started_artifacts/otel-services/otel-services-1.0.0-logs.json'; +import otelJsonMetrics from '../getting_started_artifacts/otel-services/otel-services-1.0.0-metrics.json'; +import otelJsonTraces from '../getting_started_artifacts/otel-services/otel-services-1.0.0-traces.json'; +import otelIcon from '../getting_started_artifacts/otel-services/static/logo.svg'; import pythonJson from '../getting_started_artifacts/python_client/python_client-1.0.0.json'; +import pythonIcon from '../getting_started_artifacts/python_client/static/logo.png'; import nginxJson from '../getting_started_artifacts/nginx/nginx-1.0.0.json'; +import nginxIcon from '../getting_started_artifacts/nginx/static/logo.svg'; import javaJson from '../getting_started_artifacts/java_client/java_client-1.0.0.json'; +import javaIcon from '../getting_started_artifacts/java_client/static/logo.svg'; -import { IntegrationCards } from './getting_started_integrationCards'; -import { UploadAssets } from './utils'; +import { coreRefs } from '../../../../public/framework/core_refs'; +import { UploadAssets, fetchIndexPatternIds, redirectToDashboards } from './utils'; +import { getWorkspaceIdFromUrl } from '../../../../../../src/core/public/utils'; -const cardOne = 'Collector'; -const cardTwo = 'File Upload'; -const cardThree = 'Configure use-case based content'; +const cardOne = 'Logs'; +const cardTwo = 'Metrics'; +const cardThree = 'Traces'; interface CollectAndShipDataProps { isOpen: boolean; onToggle: (isOpen: boolean) => void; - selectedTechnology: string; - onMoveToQueryData: (indexPatterns: string[]) => void; - onSelectSource: (source: string) => void; - onCardSelectionChange: (isSampleDataset: boolean) => void; selectedDataSourceId: string; selectedDataSourceLabel: string; } @@ -54,11 +58,6 @@ interface CollectorOption { } export const CollectAndShipData: React.FC = ({ - isOpen, - onToggle, - onMoveToQueryData, - onSelectSource, - onCardSelectionChange, selectedDataSourceId, selectedDataSourceLabel, }) => { @@ -68,12 +67,13 @@ export const CollectAndShipData: React.FC = ({ const [selectedTabId, setSelectedTabId] = useState('workflow_0'); const [_selectedWorkflow, setSelectedWorkflow] = useState(''); const [workflows, setWorkflows] = useState([]); - const [selectedCard, setSelectedCard] = useState(''); const [collectorOptions, setCollectorOptions] = useState([]); + const [patternsContent, setPatternsContent] = useState([]); const technologyJsonMap: Record = { - otel: otelJson, - csv: csvFileJson, + otelLogs: otelJsonLogs, + otelMetrics: otelJsonMetrics, + otelTraces: otelJsonTraces, golang: golangClientJson, python: pythonJson, nginx: nginxJson, @@ -81,7 +81,27 @@ export const 
CollectAndShipData: React.FC = ({ }; useEffect(() => { - if (specificMethod) { + handleCollectionMethodChange(cardOne); + }, []); + + useEffect(() => { + let isMounted = true; + + const fetchPatterns = async () => { + try { + const content = await fetchIndexPatternIds(specificMethod); + if (isMounted) { + setPatternsContent(content.data.length !== 0 ? content.data : []); + } + } catch (error) { + console.error('Error fetching index patterns:', error); + if (isMounted) setPatternsContent([]); + } + }; + + if (specificMethod && isMounted) { + fetchPatterns(); + const json = technologyJsonMap[specificMethod]; if (json && json['getting-started']) { const fetchedWorkflows = json['getting-started'].workflows || []; @@ -95,179 +115,226 @@ export const CollectAndShipData: React.FC = ({ setGettingStarted(null); setWorkflows([]); } - } else { - setGettingStarted(null); - setWorkflows([]); } + + return () => { + isMounted = false; + }; }, [specificMethod]); + const handleSpecificMethodChange = (newOption: any) => { + const selectedOptionValue = newOption[0]?.value; + + if (selectedOptionValue === specificMethod) { + return; + } + + setSpecificMethod(selectedOptionValue); + setSelectedWorkflow(''); + setGettingStarted(null); + setWorkflows([]); + }; + + // Auto-select first collector if nothing is selected and a collection method is set + useEffect(() => { + if (collectorOptions.length > 0 && !specificMethod && collectionMethod) { + handleSpecificMethodChange([{ value: collectorOptions[0].value }]); + } + }, [collectorOptions, specificMethod, collectionMethod]); + const handleCollectionMethodChange = (value: string) => { setCollectionMethod(value); setSpecificMethod(''); setSelectedWorkflow(''); setGettingStarted(null); setWorkflows([]); - onCardSelectionChange(value === cardThree); if (value === cardOne) { setCollectorOptions([ - { label: 'Open Telemetry (structured)', value: 'otel' }, - { label: 'Nginx (structured)', value: 'nginx' }, - { label: 'Java (unstructured)', value: 'java' }, - { label: 'Python (unstructured)', value: 'python' }, - { label: 'Golang (unstructured)', value: 'golang' }, + { label: 'Open Telemetry', value: 'otelLogs' }, + { label: 'Nginx', value: 'nginx' }, + { label: 'Java', value: 'java' }, + { label: 'Python', value: 'python' }, + { label: 'Golang', value: 'golang' }, ]); } else if (value === cardTwo) { - setCollectorOptions([{ label: 'Fluent Bit', value: 'csv' }]); + setCollectorOptions([{ label: 'Open Telemetry', value: 'otelMetrics' }]); + } else if (value === cardThree) { + setCollectorOptions([{ label: 'Open Telemetry', value: 'otelTraces' }]); } }; - const handleSpecificMethodChange = (selectedOption: any) => { - if (!selectedOption) { - return; - } - const updatedOptions = collectorOptions.map((option) => - option.value === selectedOption.value - ? { ...option, checked: 'on' } - : { ...option, checked: undefined } + const renderSpecificMethodDropdown = () => { + if (!collectionMethod) return null; + + const iconMap: Record = { + golang: golangIcon, + otelLogs: otelIcon, + otelMetrics: otelIcon, + otelTraces: otelIcon, + python: pythonIcon, + nginx: nginxIcon, + java: javaIcon, + }; + + const optionsWithIcons = collectorOptions.map((option) => ({ + label: option.label, + value: option.value, + prepend: ( + {`${option.label} + ), + })); + + const selectedOption = optionsWithIcons.find((option) => option.value === specificMethod); + + return ( + <> + + Select telemetry source +
+ handleSpecificMethodChange(newOptions)} + renderOption={(option) => ( +
+ {option.prepend} + {option.label} +
+ )} + /> +
+ ); - setCollectorOptions(updatedOptions); - setSpecificMethod(selectedOption.value); - onSelectSource(selectedOption.value); - setSelectedWorkflow(''); - setGettingStarted(null); - setWorkflows([]); }; - const onTabClick = (tab: any) => { - const workflowIndex = parseInt(tab.id.split('_')[1], 10); - setSelectedTabId(tab.id); - setSelectedWorkflow(workflows[workflowIndex].name); - setGettingStarted(workflows[workflowIndex]); - }; + const renderIndexPatternStep = ( + patternsContentRender: any[], + selectedDataSourceIdRender: string + ) => { + if (!patternsContentRender || patternsContentRender.length === 0) return null; - const renderSpecificMethodDropdown = () => { - if (!collectionMethod) return null; + const handleIndexPatternClick = (patternId: string) => { + const finalPatternId = selectedDataSourceIdRender + ? `mds-${selectedDataSourceIdRender}-objectId-${patternId}` + : patternId; + + const currentUrl = window.location.href; + const workspaceId = getWorkspaceIdFromUrl(currentUrl, coreRefs?.http!.basePath.getBasePath()); + + const workspacePatternId = workspaceId + ? `workspaceId-${workspaceId}-${finalPatternId}` + : finalPatternId; + + coreRefs?.application!.navigateToApp('data-explorer', { + path: `discover#?_a=(discover:(columns:!(_source),isDirty:!f,sort:!()),metadata:(indexPattern:'${workspacePatternId}',view:discover))&_q=(filters:!(),query:(language:kuery,query:''))&_g=(filters:!(),refreshInterval:(pause:!t,value:0),time:(from:now-15m,to:now))`, + }); + }; return ( <> -

Select a collector

+

Query your data in Discover to uncover insights

- - handleSpecificMethodChange(newOptions.find((option) => option.checked)) - } - listProps={{ bordered: true }} - > - {(list) => list} - + + {patternsContentRender.map((pattern) => ( + handleIndexPatternClick(pattern.id)} + style={{ display: 'inline-flex', alignItems: 'center', gap: '8px' }} + > + {pattern.title} + + + } + /> + ))} + ); }; - const renderSteps = (workflow: any) => { - const steps = workflow.steps.map((step: any) => ({ - title: step.name, - children: ( -
- {step.label && ( - -

{step.label}

-
- )} - {step.description} - {step['input-params'] && step['input-params'].length > 0 && ( -
- -

Input Parameters:

-
- {step['input-params'].map((param: any, idx: number) => ( - - {param.name}: {param.description} ({param.type}) - - ))} -
- )} - {step.info && - step.info.map((link: string, linkIndex: number) => ( - - More Info - - ))} - {step.content && ( + const renderSchema = (schemas: any[]) => + schemas.map((schema, idx) => { + const indexPatternName = schema['index-pattern-name'] || ''; + + return ( +
+ +

{schema.type} Schema

+
+ + {schema.description} +
+ {schema.alias && ( + <> + Alias: {schema.alias} +
+ + )} + {indexPatternName && ( + <> + Index Pattern Name: {indexPatternName} +
+ + )} + {Array.isArray(schema.info) && + schema.info.map((link: any, linkIdx: number) => + link && typeof link.url === 'string' ? ( + + {typeof link.title === 'string' && link.title.trim() !== '' + ? link.title + : 'More Info'} + + ) : ( + + Invalid URL + + ) + )} +
+ {schema.content && ( - {step.content} + {schema.content} )} + {schema['index-template'] && ( + + {`${indexPatternName} Index Template`} + + )}
- ), - })); - - steps.push({ - title: 'Schema', - children: renderSchema( - technologyJsonMap[specificMethod]?.['getting-started']?.schema || - technologyJsonMap[specificMethod]?.schema || - [] - ), - }); - - steps.push({ - title: 'Index Patterns', - children: renderIndex( - technologyJsonMap[specificMethod]?.['getting-started']?.['index-patterns'] || - technologyJsonMap[specificMethod]?.['index-patterns'] || - {} - ), + ); }); - return ; - }; - - const renderSchema = (schemas: any[]) => - schemas.map((schema, idx) => ( -
- -

{schema.type} Schema

-
- - {schema.description} -
- Alias: {schema.alias} -
- Index Pattern Name: {schema['index-pattern-name']} -
- {schema.info.map((infoLink: string, linkIdx: number) => ( - - More Info - - ))} -
- - {schema.content} - - - Index Template - - -
- )); - const renderIndex = (indexPatterns: any) => ( <> {indexPatterns?.description}
- {indexPatterns?.info?.map((infoLink: string, linkIdx: number) => ( - - More Info - - ))} + {Array.isArray(indexPatterns?.info) && + indexPatterns.info.map((link: any, linkIdx: number) => + link && typeof link.url === 'string' ? ( + + {typeof link.title === 'string' && link.title.trim() !== '' + ? link.title + : 'More Info'} + + ) : ( + + Invalid URL + + ) + )}
@@ -286,13 +353,144 @@ export const CollectAndShipData: React.FC = ({ > Create assets - - onMoveToQueryData(indexPatterns?.['index-patterns-name'] || [])}> - Move to query and analyze data - ); + const renderTechnologyDashboardCards = (specificMethodRender: string) => { + const baseUrl = `${window.location.origin}/app`; + + const cardData = { + nginx: { + title: 'Nginx Dashboard', + description: 'Analyze logs with pre-packaged dashboards', + icon: Nginx Icon, + url: `${baseUrl}/integrations#/available/nginx/setup`, + }, + }; + + const selectedCard = cardData[specificMethodRender]; + + if (!selectedCard) return null; + + return ( + + window.open(selectedCard.url, '_blank')}> + Install from Catalog + + } + /> + + ); + }; + + const renderVisualizeDataStep = () => { + return ( + + {renderTechnologyDashboardCards(specificMethod)} + + + } + title="Create a new dashboard" + description="Create a new dashboard to visualize your data" + footer={ + redirectToDashboards('dashboards')}> + Create a dashboard + + } + /> + + + ); + }; + + const renderSteps = (workflow: any) => { + const steps = [ + { + title: 'Schema', + children: renderSchema( + technologyJsonMap[specificMethod]?.['getting-started']?.schema || + technologyJsonMap[specificMethod]?.schema || + [] + ), + }, + { + title: 'Index Patterns', + children: renderIndex( + technologyJsonMap[specificMethod]?.['getting-started']?.['index-patterns'] || + technologyJsonMap[specificMethod]?.['index-patterns'] || + {} + ), + }, + ]; + + steps.push( + ...workflow.steps.map((step: any) => ({ + title: step.name, + children: ( +
+ {step.description} + {step['input-params'] && step['input-params'].length > 0 && ( +
+ +

Input Parameters:

+
+ {step['input-params'].map((param: any, idx: number) => ( + + {param.name}: {param.description} + + ))} +
+ )} + + {Array.isArray(step.info) && + step.info.map((link: any, linkIndex: number) => { + if (link && typeof link.url === 'string') { + return ( + + {typeof link.title === 'string' && link.title.trim() !== '' + ? link.title + : 'More Info'} + + ); + } else { + return ( + + Invalid URL + + ); + } + })} + + {step.content && ( + + {step.content} + + )} + +
+ ), + })) + ); + + steps.push({ + title: 'Explore your data', + children: renderIndexPatternStep(patternsContent, selectedDataSourceId), + }); + + steps.push({ + title: 'Visualize your data', + children: renderVisualizeDataStep(), + }); + + return ; + }; + const tabs = workflows.map((workflow, index) => ({ id: `workflow_${index}`, name: workflow.name, @@ -306,83 +504,68 @@ export const CollectAndShipData: React.FC = ({ return ( - - -

Collection method

-
- - - - { - handleCollectionMethodChange(cardOne); - setSelectedCard(cardOne); - }} - > - Configure agents and ingestion pipeline - - - - { - handleCollectionMethodChange(cardTwo); - setSelectedCard(cardTwo); - }} - > - Upload your data - - - - { - handleCollectionMethodChange(cardThree); - setSelectedCard(cardThree); - }} - > - Explore with a log dataset - - - - - {collectionMethod === cardThree ? ( - - ) : ( - <> - {renderSpecificMethodDropdown()} - - {specificMethod && ( - <> - - {tabs.length > 0 && ( - tab.id === selectedTabId)} - onTabClick={onTabClick} - /> - )} - - )} - - )} -
+ + + + { + handleCollectionMethodChange(cardOne); + }} + /> + + + { + handleCollectionMethodChange(cardTwo); + }} + /> + + + { + handleCollectionMethodChange(cardThree); + }} + /> + + + + {renderSpecificMethodDropdown()} + + {specificMethod && ( + <> + + {tabs.length > 0 && ( + <> + +

+ Steps to integrate{' '} + {specificMethod.startsWith('otel') + ? 'OpenTelemetry' + : specificMethod.charAt(0).toUpperCase() + specificMethod.slice(1)}{' '} + {collectionMethod.toLowerCase()} +

+
+ tab.id === selectedTabId)} + onTabClick={(tab) => setSelectedTabId(tab.id)} + /> + + )} + + )}
); }; diff --git a/public/components/getting_started/components/getting_started_integrationCards.tsx b/public/components/getting_started/components/getting_started_integrationCards.tsx deleted file mode 100644 index 360060e56..000000000 --- a/public/components/getting_started/components/getting_started_integrationCards.tsx +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -import { - EuiBadge, - EuiCard, - EuiFieldSearch, - EuiFilterButton, - EuiFilterGroup, - EuiFilterSelectItem, - EuiFlexGroup, - EuiFlexItem, - EuiPopover, - EuiPopoverTitle, - EuiSpacer, - EuiText, -} from '@elastic/eui'; -import React, { useEffect, useState } from 'react'; -import { observabilityIntegrationsID } from '../../../../common/constants/shared'; -import { coreRefs } from '../../../../public/framework/core_refs'; - -export const IntegrationCards = () => { - const [integrationData, setIntegrationData] = useState([]); - const [loading, setLoading] = useState(true); - const [query, setQuery] = useState(''); - const [isPopoverOpen, setIsPopoverOpen] = useState(false); - const [items, setItems] = useState([] as Array<{ name: string; checked: boolean }>); - - useEffect(() => { - async function fetchIntegrationData() { - try { - const response = await coreRefs.http!.get(`/api/integrations/repository`); - const data = response.data.hits; - setIntegrationData(data); - const categories = Array.from(new Set(data.flatMap((item: any) => item.labels ?? []))); - setItems(categories.map((name) => ({ name, checked: false }))); - } catch (error) { - console.error('Error fetching integration data:', error); - } finally { - setLoading(false); - } - } - fetchIntegrationData(); - }, []); - - const updateItem = (index: number) => { - if (!items[index]) { - return; - } - const newItems = [...items]; - newItems[index].checked = !items[index].checked; - setItems(newItems); - }; - - const renderFilters = () => { - const button = ( - setIsPopoverOpen(!isPopoverOpen)} - isSelected={isPopoverOpen} - numFilters={items.length} - hasActiveFilters={!!items.find((item) => item.checked)} - numActiveFilters={items.filter((item) => item.checked).length} - > - Categories - - ); - - return ( - - - setQuery(e.target.value)} - /> - - - - setIsPopoverOpen(false)} - panelPaddingSize="none" - > - - - -
- {items.map((item, index) => ( - updateItem(index)} - > - {item.name} - - ))} -
-
-
-
-
- ); - }; - - const filteredHits = integrationData.filter( - (hit) => - (!query || hit.name.toLowerCase().includes(query.toLowerCase())) && - items.filter((item) => item.checked).every((item) => hit.labels?.includes(item.name)) - ); - - const renderIntegrationCards = () => { - if (loading) { - return Loading...; - } - - if (filteredHits.length === 0) { - return ( - -

No integrations available

-

There are no integrations available at this time.

-
- ); - } - - return ( - - {filteredHits.map((integration, index) => ( - - - ) : ( -
- ) - } - title={integration.displayName || integration.name} - description={integration.description} - data-test-subj={`integration_card_${integration.name.toLowerCase()}`} - titleElement="span" - onClick={() => - coreRefs?.application?.navigateToApp(observabilityIntegrationsID, { - path: `#/available/${integration.name}`, - }) - } - footer={ -
- {integration.labels && - integration.labels.map((label: string, idx: number) => ( - {label} - ))} -
- } - /> - - ))} - - ); - }; - - return ( - <> - {renderFilters()} - - {renderIntegrationCards()} - - ); -}; diff --git a/public/components/getting_started/components/getting_started_queryAndAnalyze.tsx b/public/components/getting_started/components/getting_started_queryAndAnalyze.tsx deleted file mode 100644 index f6c5a0e93..000000000 --- a/public/components/getting_started/components/getting_started_queryAndAnalyze.tsx +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -import React, { useEffect, useState } from 'react'; -import { - EuiAccordion, - EuiButton, - EuiCard, - EuiFlexGroup, - EuiFlexItem, - EuiHorizontalRule, - EuiPanel, - EuiSpacer, - EuiText, -} from '@elastic/eui'; -import { coreRefs } from '../../../../public/framework/core_refs'; -import { fetchDashboardIds, fetchIndexPatternIds, redirectToDashboards } from './utils'; -import { getWorkspaceIdFromUrl } from '../../../../../../src/core/public/utils'; - -interface Pattern { - id: string; - title: string; -} - -interface Dashboard { - id: string; - title: string; -} - -interface QueryAndAnalyzeProps { - isOpen: boolean; - onToggle: (isOpen: boolean) => void; - selectedTechnology: string; - selectedDataSourceId: string; - selectedDataSourceLabel: string; -} - -export const QueryAndAnalyze: React.FC = ({ - isOpen, - onToggle, - selectedTechnology, - selectedDataSourceId, -}) => { - const [patternsContent, setPatternsContent] = useState([]); - const [dashboardsContent, setDashboardsContent] = useState([]); - - const fetchIndexPatternContent = async () => { - try { - const content = await fetchIndexPatternIds(selectedTechnology); - setPatternsContent(content.data.length !== 0 ? content.data : []); - } catch (error) { - console.error('Error fetching index patterns:', error); - setPatternsContent([]); - } - - try { - const content = await fetchDashboardIds(selectedTechnology); - setDashboardsContent(content.data.length !== 0 ? content.data : []); - } catch (error) { - console.error('Error fetching dashboards:', error); - setDashboardsContent([]); - } - }; - - useEffect(() => { - if (selectedTechnology !== '') { - fetchIndexPatternContent(); - } - }, [selectedTechnology, selectedDataSourceId]); - - const handleIndexPatternClick = (patternId: string) => { - const finalPatternId = selectedDataSourceId - ? `mds-${selectedDataSourceId}-objectId-${patternId}` - : patternId; - - const currentUrl = window.location.href; - const workspaceId = getWorkspaceIdFromUrl(currentUrl, coreRefs?.http!.basePath.getBasePath()); - - const workspacePatternId = workspaceId - ? `workspaceId-${workspaceId}-${finalPatternId}` - : finalPatternId; - - coreRefs?.application!.navigateToApp('data-explorer', { - path: `discover#?_a=(discover:(columns:!(_source),isDirty:!f,sort:!()),metadata:(indexPattern:'${workspacePatternId}',view:discover))&_q=(filters:!(),query:(language:kuery,query:''))&_g=(filters:!(),refreshInterval:(pause:!t,value:0),time:(from:now-15m,to:now))`, - }); - }; - - const redirectToDashboardsMDS = (dashboardId: string) => { - const finalDashboardId = selectedDataSourceId - ? `mds-${selectedDataSourceId}-objectId-${dashboardId}` - : dashboardId; - - const currentUrl = window.location.href; - const workspaceId = getWorkspaceIdFromUrl(currentUrl, coreRefs?.http!.basePath.getBasePath()); - - const workspaceDashboardId = workspaceId - ? 
`workspaceId-${workspaceId}-${finalDashboardId}` - : finalDashboardId; - const dashboardUrl = `#/view/${workspaceDashboardId}`; - - coreRefs?.application!.navigateToApp('dashboards', { - path: dashboardUrl, - }); - }; - - return ( - - - -

-

Explore your data

-

-
- - - {patternsContent.length !== 0 && - patternsContent.map((pattern) => ( - - handleIndexPatternClick(pattern.id)}> - {pattern.title} - - - ))} - - - -

-

Visualize your data

-

-
- - - {dashboardsContent.length !== 0 && - dashboardsContent.map((dashboard) => ( - - } - title={dashboard.title} - description={`Explore the ${dashboard.title} dashboard`} - onClick={() => { - redirectToDashboardsMDS(dashboard.id); - }} - /> - - ))} - - - } - title="Create New Dashboard" - description="Create a new dashboard to visualize your data" - onClick={() => { - redirectToDashboards('dashboards'); - }} - /> - - -
-
- ); -}; diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/README.md b/public/components/getting_started/getting_started_artifacts/csv_file/README.md deleted file mode 100644 index d20db8315..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/README.md +++ /dev/null @@ -1,38 +0,0 @@ -# CSV Upload Integration - -> CSV File based Upload Integration - -## What is CSV Upload ? - -CSV upload is an example of parsing and loading a CSV file into opensearch index using an agent - -## What is CSV Integration ? - -An integration is a bundle of pre-canned assets which are bundled togather in a meaningful manner. - -**_CSV Upload_** integration includes docker live example including getting started instruction of using data-prepper or fluent-bit for -uploading the csv file into a dedicated index using a parser to transform the csv into json - -## Ingesting CVS Using Data-Prepper - ... - -## Ingesting CVS Using Flunet-Bit - -... - ---- -## Loading Integrations via DashboardManagement - -To update an integration template navigate to the DashboardManagement and select [savedObjects](https://localhost:5601/_dashboards/app/management/opensearch-dashboards/objects) and import the new artifact: - -1) Download the `nginx-1.0.0.ndjson` artifact from the [catalog release page](https://github.com/opensearch-project/opensearch-catalog/releases/edit/nginx-1.0.0) - -2) Go to the [DashboardManagement -> savedObjects ](https://localhost:5601/_dashboards/app/management/opensearch-dashboards/objects) - -![](https://github.com/opensearch-project/opensearch-catalog/assets/48943349/d96e9a78-e3de-4cce-ba66-23f7c084778d) - -![](https://github.com/opensearch-project/opensearch-catalog/assets/48943349/a63ae102-706a-4980-b758-fff7f6b24a94) - -3) Once there select import to load the recently downloaded integration artifact (`nginx-1.0.0.ndjson` suffix) - -4) Open the [nginx integration](https://localhost:5601/app/integrations#/available/nginx) and install diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/assets/fluent-bit-csv-upload-1.0.0.ndjson b/public/components/getting_started/getting_started_artifacts/csv_file/assets/fluent-bit-csv-upload-1.0.0.ndjson deleted file mode 100644 index ecdfad483..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/assets/fluent-bit-csv-upload-1.0.0.ndjson +++ /dev/null @@ -1,4 +0,0 @@ -{"attributes":{"description":"upload a csv file example using fluent-bit agent","kibanaSavedObjectMeta":{"searchSourceJSON":"{\"query\":{\"query\":\"\",\"language\":\"kuery\"},\"filter\":[]}"},"title":"fluent-bit-csv-upload","uiStateJSON":"{}","version":1,"visState":"{\"title\":\"fluent-bit-csv-upload\",\"type\":\"markdown\",\"aggs\":[],\"params\":{\"fontSize\":12,\"openLinksInNewTab\":false,\"markdown\":\"# Uploading a CSV File into an OpenSearch Index Using Fluent Bit\\n\\nThis tutorial will guide you through the process of setting up Fluent Bit to monitor a directory for CSV files and upload their contents into an OpenSearch index.\\n\\n## Prerequisites\\n\\n- An OpenSearch instance running and accessible.\\n- Fluent Bit installed on your system.\\n- A directory containing your CSV files.\\n\\n## Step 1: Install Fluent Bit\\n\\n### On Linux:\\n\\n```bash\\ncurl -L https://fluentbit.io/releases/1.8/fluent-bit-1.8.11-linux-x86_64.tar.gz -o fluent-bit.tar.gz\\ntar -xvf fluent-bit.tar.gz\\ncd fluent-bit/bin\\n```\\n\\n### On macOS:\\n\\n```bash\\nbrew install fluent-bit\\n```\\n\\n### 
On Windows:\\n\\nDownload and extract Fluent Bit from [Fluent Bit releases](https://fluentbit.io/download/).\\n\\n## Step 2: Create Fluent Bit Configuration Files\\n\\n#### Create `fluent-bit.conf`\\n\\nThis is the main configuration file for Fluent Bit. It defines the input source, parser, and output destination.\\n\\n```ini\\n[SERVICE]\\n Flush 1\\n Log_Level info\\n Parsers_File parsers.conf\\n\\n[INPUT]\\n Name tail\\n Path /path/to/your/csv/files/*.csv\\n Parser csv\\n Tag csv\\n Refresh_Interval 5\\n Rotate_Wait 30\\n\\n[OUTPUT]\\n Name opensearch\\n Match *\\n Host your-opensearch-host\\n Port 9200\\n Index csv-index\\n HTTP_User your-username\\n HTTP_Passwd your-password\\n tls off\\n Suppress_Type_Name On\\n tls.verify off\\n```\\n\\n### Create `parsers.conf`\\n\\nThis file defines the CSV parser.\\n\\n```ini\\n[PARSER]\\n Name csv\\n Format regex\\n Regex ^(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+)$\\n Time_Key timestamp\\n Time_Format %Y-%m-%d %H:%M:%S\\n```\\n\\n### Direct the CSV folder location\\n\\nEnsure this file is in the directory you specified in the `Path` of the `fluent-bit.conf` file.\\n\\n\\n## Step 3: Run Fluent Bit\\n\\nNavigate to the directory containing the Fluent Bit executable and the configuration files. Then, start Fluent Bit with the configuration files.\\n\\n```bash\\n/path/to/fluent-bit/bin/fluent-bit -c /path/to/fluent-bit.conf\\n```\\n\\n## Step 4: Verify Data in OpenSearch\\n\\nAfter starting Fluent Bit, you can verify the data ingestion by accessing OpenSearch and searching for the `csv-index` index.\\n\\nFor example, you can use OpenSearch Dashboards or the OpenSearch API to query the index:\\n\\n### Using OpenSearch Dashboards:\\n\\n1. Open OpenSearch Dashboards in your browser.\\n2. Navigate to the \\\"Discover\\\" tab.\\n3. Select the `csv-index` index pattern.\\n4. Verify that the log data from your CSV files is being ingested and displayed.\\n\\n### Using the OpenSearch API:\\n\\n```bash\\ncurl -X GET \\\"http://your-opensearch-host:9200/csv-index/_search?pretty\\\"\\n```\\n\\n---\\n## Live Testing with Docker Compose\\nIf you prefer to test this setup using Docker Compose, you can use the following docker-compose.yml file to quickly set up an OpenSearch instance along with Fluent Bit:\\n\\nUnder the `getting-started` section you can examine a live docker-compose sample:\\n```yaml\\n/csv_file/getting-started/fluent-bit\\n|-- docker-complete.yml\\n|-- data/\\n |-- fluent-bit.conf\\n |-- parsers.conf\\n |-- logs.csv\\n\\n```\\nUse the [docker-compose](../getting-started/fluent-bit/docker-complete.yml) you can find a complete:\\n\\n`docker compose -f docker-complete.yml up -d` would instantiate the services and start sending the csv sample logs into an index. 
\\n\"}}"},"id":"0fad8910-43d9-11ef-a69e-0549ba61487e","migrationVersion":{"visualization":"7.10.0"},"references":[],"type":"visualization","updated_at":"2024-07-17T16:55:31.713Z","version":"WzEsMV0="} -{"attributes":{"description":"upload a csv file example using fluent-bit agent","hits":0,"kibanaSavedObjectMeta":{"searchSourceJSON":"{\"query\":{\"language\":\"kuery\",\"query\":\"\"},\"filter\":[]}"},"optionsJSON":"{\"hidePanelTitles\":false,\"useMargins\":true}","panelsJSON":"[{\"version\":\"2.15.0\",\"gridData\":{\"x\":0,\"y\":0,\"w\":24,\"h\":15,\"i\":\"22a1a11f-7ecf-46c7-a73d-b6cb5eb07b45\"},\"panelIndex\":\"22a1a11f-7ecf-46c7-a73d-b6cb5eb07b45\",\"embeddableConfig\":{},\"panelRefName\":\"panel_0\"}]","timeRestore":false,"title":"csv-file-upload-fluent-bit-dashboard","version":1},"id":"1e4f1c40-43d9-11ef-a69e-0549ba61487e","migrationVersion":{"dashboard":"7.9.3"},"references":[{"id":"0fad8910-43d9-11ef-a69e-0549ba61487e","name":"panel_0","type":"visualization"}],"type":"dashboard","updated_at":"2024-07-17T16:55:31.713Z","version":"WzIsMV0="} -{"attributes":{"fields":"[{\"count\":0,\"name\":\"@timestamp\",\"type\":\"date\",\"esTypes\":[\"date\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true},{\"count\":0,\"name\":\"_id\",\"type\":\"string\",\"esTypes\":[\"_id\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"count\":0,\"name\":\"_index\",\"type\":\"string\",\"esTypes\":[\"_index\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":false},{\"count\":0,\"name\":\"_score\",\"type\":\"number\",\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"_source\",\"type\":\"_source\",\"esTypes\":[\"_source\"],\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"_type\",\"type\":\"string\",\"scripted\":false,\"searchable\":false,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"application\",\"type\":\"string\",\"esTypes\":[\"text\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"application.keyword\",\"type\":\"string\",\"esTypes\":[\"keyword\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true,\"subType\":{\"multi\":{\"parent\":\"application\"}}},{\"count\":0,\"name\":\"host\",\"type\":\"string\",\"esTypes\":[\"text\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"host.keyword\",\"type\":\"string\",\"esTypes\":[\"keyword\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true,\"subType\":{\"multi\":{\"parent\":\"host\"}}},{\"count\":0,\"name\":\"log_level\",\"type\":\"string\",\"esTypes\":[\"text\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"log_level.keyword\",\"type\":\"string\",\"esTypes\":[\"keyword\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true,\"subType\":{\"multi\":{\"parent\":\"log_level\"}}},{\"count\":0,\"name\":\"message\",\"type\":\"string\",\"esTypes\":[\"text\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"message.keyword\",\"type\":\"string\",\"esTypes\":[\"keyword\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true
,\"readFromDocValues\":true,\"subType\":{\"multi\":{\"parent\":\"message\"}}},{\"count\":0,\"name\":\"timestamp\",\"type\":\"string\",\"esTypes\":[\"text\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":false,\"readFromDocValues\":false},{\"count\":0,\"name\":\"timestamp.keyword\",\"type\":\"string\",\"esTypes\":[\"keyword\"],\"scripted\":false,\"searchable\":true,\"aggregatable\":true,\"readFromDocValues\":true,\"subType\":{\"multi\":{\"parent\":\"timestamp\"}}}]","timeFieldName":"@timestamp","title":"logs-*"},"id":"csv-getting-started-tutorial-1.0.0","migrationVersion":{"index-pattern":"7.6.0"},"references":[],"type":"index-pattern","updated_at":"2024-07-17T16:59:06.006Z","version":"WzMsMV0="} -{"exportedCount":3,"missingRefCount":0,"missingReferences":[]} \ No newline at end of file diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/csv_file-1.0.0.json b/public/components/getting_started/getting_started_artifacts/csv_file/csv_file-1.0.0.json deleted file mode 100644 index 8e772dc7f..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/csv_file-1.0.0.json +++ /dev/null @@ -1,166 +0,0 @@ -{ - "name": "csv", - "version": "1.0.0", - "displayName": "csv-file", - "description": "Upload a CSV file", - "license": "Apache-2.0", - "type": "logs", - "labels": ["Logs", "Unstructured"], - "author": "OpenSearch", - "sourceUrl": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file", - "workflows": [ - { - "name": "dashboards", - "label": "Dashboards & Visualizations", - "description": "Dashboards and indices that enable you to easily visualize important metrics.", - "enabled_by_default": false - } - ], - "statics": { - "logo": { - "annotation": "CSV Logo", - "path": "logo.svg" - }, - "gallery": [ - { - "annotation": "Fluent-Bit getting started tutorial", - "path": "fluent-bit-getting-started-dashboard.png", - "tags":["dashboard","tutorial"], - "savedObject-id": "csv-getting-started-tutorial-1.0.0" - }, - { - "annotation": "Data-Prepper Logo", - "path": "data-pepper.png" - }, - { - "annotation": "Fluent-Bit Logo", - "path": "fluentbit.png" - } - ] - }, - "components": [], - "getting-started": { - "info": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/getting-started/Getting-Started.md", - "ingestion": ["fluent-bit","data-prepper"], - "structured": "true", - "technology": "csv", - "protocol": [], - "live-sample": "true", - "workflows": [ - { - "name": "QuickStart", - "description": "This is a docker-composed based getting started live example with CSV file upload", - "steps": [ - { - "name": "Create docker-network", - "label": "Environment setting", - "phase": "docker", - "type": "console-cmd", - "content": "docker network create opensearch-net", - "description": "Before running any docker-compose files, create the opensearch-net network" - }, - { - "name": "Setup docker .env file", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "input-params": [ - { - "name": "OPENSEARCH_HOST", - "description": "Opensearch host", - "type": "host-name" - }, - { - "name": "OPENSEARCH_DASHBOARD_HOST", - "description": "Opensearch Dashboard host", - "type": "host-name" - } - ], - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/getting-started/.env"], - "content": "wget 
https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/csv_file/getting-started/.env", - "description": "Setup docker-compose env variables " - }, - { - "name": "Setup fluent-bit folder", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "info": ["https://github.com/opensearch-project/opensearch-catalog/tree/main/integrations/observability/csv_file/getting-started/fluent-bit/fluent-bit.conf", - "https://github.com/opensearch-project/opensearch-catalog/tree/main/integrations/observability/csv_file/getting-started/fluent-bit/parsers.conf" - ], - "content": "mkdir -p fluent-bit\nwget -P fluent-bit https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/csv_file/getting-started/fluent-bit/fluent-bit.conf \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/csv_file/getting-started/fluent-bit/parsers.conf\n", - "description": "Setup docker-compose fluent-bit's service configuration " - }, - { - "name": "Run docker-compose", - "label": "live container", - "type": "file-url", - "phase": "docker", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/getting-started/docker-compose.yml"], - "description": "Run docker-compose for the nginx live example and see the generated index", - "content": "wget -O docker-compose.yml https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/csv_file/getting-started/docker-compose.yml && docker-compose up -d fluent-bit \n" - } - ] - }, - { - "name": "Fluent-bit file upload", - "description": "This is a fluent-bit based CSV file upload getting started instructions tutorial", - "steps": [ - { - "name": "Fluent-Bit Parser", - "type": "console-cmd", - "phase": "ingestion", - "label": "Log Parsing", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/getting-started/fluent-bit/parsers.conf"], - "description": "Setup Fluent-Bit parser config file parsing csv file", - "content": "[PARSER]\n Name csv\n Format regex\n Regex ^(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+)$\n Time_Key timestamp\n Time_Format %Y-%m-%d %H:%M:%S\n" - }, - { - "name": "Fluent-Bit Setup", - "type": "console-cmd", - "phase": "ingestion", - "label": "Agent Set-Up", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/getting-started/fluent-bit/fluent-bit.conf"], - "description": "Setup Fluent-Bit conf file including logs parsing and OpenSearch access", - "input-params": [ - { - "name": "opensearch-node1", - "description": "Opensearch host", - "type": "host-name" - }, - { - "name": "ss4o_logs-nginx-prod", - "description": "logs sink index name", - "type": "index-name" - } - ], - "content": "[SERVICE]\n Flush 1\n Log_Level info\n Parsers_File parsers.conf\n\n[INPUT]\n Name tail\n Path /fluent-bit/data/*.csv\n Parser csv\n Tag csv\n\n[INPUT]\n Name dummy\n Dummy {\"timestamp\":\"2024-07-16 12:09:00\", \"log_level\":\"INFO\", \"message\":\"Dummy log message\", \"application\":\"App2\", \"host\":\"host2\"}\n Tag dummy\n\n[OUTPUT]\n Name opensearch\n Host opensearch-node1\n Match *\n Port 9200\n Type _doc\n Index logs-index\n tls On\n tls.verify Off\n Suppress_Type_Name On\n HTTP_User admin\n HTTP_Passwd my_%New%_passW0rd!@#" - } - ] - } - ], - "schema": [ - { - "type": "csv-logs", - "info": 
["https://opensearch.org/docs/latest/im-plugin/index-templates"], - "content": "PUT _index_template/csv_logs_template", - "description": "Setup sample csv based schema logs index template in the dev console", - "index-template": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/schema/csv-logs-1.0.0.mapping.json", - "index-pattern-name": "logs-*" - } - ], - "index-patterns": { - "type": ["logs"], - "info": ["https://opensearch.org/docs/latest/dashboards/management/index-patterns/"], - "description": "Import index patterns `.ndjson` file to the saved objects", - "index-pattern": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/csv_file/assets/fluent-bit-csv-upload-1.0.0.ndjson", - "index-patterns-name": ["logs-*"] - } - }, - "assets": [ - { "name": "fluent-bit-csv-upload", "version": "1.0.0", "extension": "ndjson", "type": "savedObjectBundle", "workflows": ["dashboards"] } - ], - "sampleData": { - "path": "logs.csv" - } -} \ No newline at end of file diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/data/logs.csv b/public/components/getting_started/getting_started_artifacts/csv_file/data/logs.csv deleted file mode 100644 index d0136c721..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/data/logs.csv +++ /dev/null @@ -1,10 +0,0 @@ -2024-07-16 12:00:00,INFO,Application started successfully,App1,host1 -2024-07-16 12:01:00,DEBUG,User logged in,App1,host1 -2024-07-16 12:01:05,ERROR,Failed to load resource,App1,host1 -2024-07-16 12:02:00,WARN,Deprecated API used,App1,host1 -2024-07-16 12:03:00,INFO,Background job executed,App1,host1 -2024-07-16 12:04:00,DEBUG,Cache cleared,App1,host1 -2024-07-16 12:05:00,INFO,User logged out,App1,host1 -2024-07-16 12:06:00,ERROR,Database connection failed,App1,host1 -2024-07-16 12:07:00,INFO,Application shutdown initiated,App1,host1 -2024-07-16 12:08:00,INFO,Application shutdown completed,App1,host1 diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/.env b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/.env deleted file mode 100644 index 60ee804eb..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/.env +++ /dev/null @@ -1,18 +0,0 @@ -# Nginx Proxy -NGINX_PORT=90 -NGINX_ADDR=nginx:${NGINX_PORT} - -# OpenSearch version -OPENSEARCH_VERSION=2.15.0 -OPENSEARCH_ADMIN_PASSWORD=my_%New%_passW0rd!@# -OPENSEARCH_INITIAL_ADMIN_PASSWORD=my_%New%_passW0rd!@# - -# OpenSearch Node1 -OPENSEARCH_PORT=9200 -OPENSEARCH_HOST=opensearch -OPENSEARCH_ADDR=${OPENSEARCH_HOST}:${OPENSEARCH_PORT} - -# OpenSearch Dashboard -OPENSEARCH_DASHBOARD_PORT=5601 -OPENSEARCH_DASHBOARD_HOST=opensearch-dashboards -OPENSEARCH_DASHBOARD_ADDR=${OPENSEARCH_DASHBOARD_HOST}:${OPENSEARCH_DASHBOARD_PORT} diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/data-prepper/pipeline.yml b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/data-prepper/pipeline.yml deleted file mode 100644 index fdd41819d..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/data-prepper/pipeline.yml +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright The OpenTelemetry Authors -# SPDX-License-Identifier: Apache-2.0 - -csv-pipeline: - source: - file: - path: "/full/path/to/ingest.csv" - record_type: "event" - 
processor: - - csv: - column_names: ["col1", "col2", "col3"] - sink: - - opensearch: - hosts: ["https://opensearch-node1:9200"] - username: "admin" - password: "my_%New%_passW0rd!@#" - insecure: true - index_type: custom - index: logs-index - bulk_size: 4 diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/docker-complete.yml b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/docker-complete.yml deleted file mode 100644 index 582f5f52d..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/docker-complete.yml +++ /dev/null @@ -1,58 +0,0 @@ -version: '3.8' - -services: - opensearch-dashboards: - image: opensearchproject/opensearch-dashboards:${OPENSEARCH_VERSION} - container_name: opensearch-dashboards - ports: - - 5601:5601 - environment: - OPENSEARCH_HOSTS: '["https://opensearch-node1:9200"]' - depends_on: - - opensearch-node1 - volumes: - - ./opensearch_dashboards.yml:/usr/share/opensearch-dashboards/config/opensearch_dashboards.yml - networks: - - opensearch-net - - opensearch-node1: - image: opensearchproject/opensearch:${OPENSEARCH_VERSION} - container_name: opensearch-node1 - environment: - - cluster.name=my-cluster - - node.name=opensearch-node1 - - discovery.seed_hosts=opensearch-node1 - - cluster.initial_master_nodes=opensearch-node1 - - bootstrap.memory_lock=true - - plugins.query.datasources.encryption.masterkey=8e3f206ea7c07cc1bfc5cf40 - - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" - - "OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_ADMIN_PASSWORD}" - ulimits: - memlock: - soft: -1 - hard: -1 - volumes: - - opensearch-data:/usr/share/opensearch/data - ports: - - 9200:9200 - - 9600:9600 - networks: - - opensearch-net - - fluent-bit: - container_name: fluent-bit - volumes: - - ./logs:/logs - - ./fluent-bit:/fluent-bit/etc - ports: - - "24224:24224" - - "24224:24224/udp" - networks: - - opensearch-net - -volumes: - opensearch-data: - -networks: - opensearch-net: - driver: bridge diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit.yml b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit.yml deleted file mode 100644 index 16f95f8b3..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit.yml +++ /dev/null @@ -1,21 +0,0 @@ -version: '3.8' - -services: - fluent-bit: - container_name: fluent-bit - image: fluent/fluent-bit:latest - volumes: - - ./logs:/logs - - ./fluent-bit:/fluent-bit/etc - ports: - - "24224:24224" - - "24224:24224/udp" - networks: - - opensearch-net - -volumes: - opensearch-data: - -networks: - opensearch-net: - driver: bridge diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/fluent-bit.conf b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/fluent-bit.conf deleted file mode 100644 index 9a23d90c7..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/fluent-bit.conf +++ /dev/null @@ -1,28 +0,0 @@ -[SERVICE] - Flush 1 - Log_Level info - Parsers_File parsers.conf - -[INPUT] - Name tail - Path *.csv - Parser csv - Tag csv - -[INPUT] - Name dummy - Dummy {"timestamp":"2024-07-16 12:09:00", "log_level":"INFO", "message":"Dummy log message", "application":"App2", "host":"host2"} - Tag dummy - -[OUTPUT] - Name opensearch - Host 
opensearch-node1 - Match * - Port 9200 - Type _doc - Index logs-index - tls On - tls.verify Off - Suppress_Type_Name On - HTTP_User admin - HTTP_Passwd my_%New%_passW0rd!@# diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/logs.csv b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/logs.csv deleted file mode 100644 index d0136c721..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/logs.csv +++ /dev/null @@ -1,10 +0,0 @@ -2024-07-16 12:00:00,INFO,Application started successfully,App1,host1 -2024-07-16 12:01:00,DEBUG,User logged in,App1,host1 -2024-07-16 12:01:05,ERROR,Failed to load resource,App1,host1 -2024-07-16 12:02:00,WARN,Deprecated API used,App1,host1 -2024-07-16 12:03:00,INFO,Background job executed,App1,host1 -2024-07-16 12:04:00,DEBUG,Cache cleared,App1,host1 -2024-07-16 12:05:00,INFO,User logged out,App1,host1 -2024-07-16 12:06:00,ERROR,Database connection failed,App1,host1 -2024-07-16 12:07:00,INFO,Application shutdown initiated,App1,host1 -2024-07-16 12:08:00,INFO,Application shutdown completed,App1,host1 diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/parsers.conf b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/parsers.conf deleted file mode 100644 index 056a359d0..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/fluent-bit/parsers.conf +++ /dev/null @@ -1,6 +0,0 @@ -[PARSER] - Name csv - Format regex - Regex ^(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+)$ - Time_Key timestamp - Time_Format %Y-%m-%d %H:%M:%S diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/logs/logs.csv b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/logs/logs.csv deleted file mode 100644 index d0136c721..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/logs/logs.csv +++ /dev/null @@ -1,10 +0,0 @@ -2024-07-16 12:00:00,INFO,Application started successfully,App1,host1 -2024-07-16 12:01:00,DEBUG,User logged in,App1,host1 -2024-07-16 12:01:05,ERROR,Failed to load resource,App1,host1 -2024-07-16 12:02:00,WARN,Deprecated API used,App1,host1 -2024-07-16 12:03:00,INFO,Background job executed,App1,host1 -2024-07-16 12:04:00,DEBUG,Cache cleared,App1,host1 -2024-07-16 12:05:00,INFO,User logged out,App1,host1 -2024-07-16 12:06:00,ERROR,Database connection failed,App1,host1 -2024-07-16 12:07:00,INFO,Application shutdown initiated,App1,host1 -2024-07-16 12:08:00,INFO,Application shutdown completed,App1,host1 diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/opensearch_dashboards.yml b/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/opensearch_dashboards.yml deleted file mode 100644 index e386ddbc1..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/getting-started/opensearch_dashboards.yml +++ /dev/null @@ -1,10 +0,0 @@ -opensearch.hosts: ["https://opensearch-node1:9200"] -server.host: 0.0.0.0 -opensearch.ssl.verificationMode: none -opensearch.username: "admin" -opensearch.password: "my_%New%_passW0rd!@#" -opensearch.requestHeadersWhitelist: [ authorization,securitytenant ] -opensearch_security.multitenancy.enabled: false 
-opensearch_security.multitenancy.tenants.preferred: ["Private", "Global"] -opensearch_security.readonly_mode.roles: ["kibana_read_only"] -vis_type_vega.enableExternalUrls: true diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/info/Getting-Started.md b/public/components/getting_started/getting_started_artifacts/csv_file/info/Getting-Started.md deleted file mode 100644 index 344b66f84..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/info/Getting-Started.md +++ /dev/null @@ -1,189 +0,0 @@ -# Uploading a CSV File into an OpenSearch Index Using Fluent Bit - -This tutorial will guide you through the process of setting up Fluent Bit to monitor a directory for CSV files and upload their contents into an OpenSearch index. - -## Prerequisites - -- An OpenSearch instance running and accessible. -- Fluent Bit installed on your system. -- A directory containing your CSV files. - -## Step 1: Install Fluent Bit - -### On Linux: - -```bash -curl -L https://fluentbit.io/releases/1.8/fluent-bit-1.8.11-linux-x86_64.tar.gz -o fluent-bit.tar.gz -tar -xvf fluent-bit.tar.gz -cd fluent-bit/bin -``` - -### On macOS: - -```bash -brew install fluent-bit -``` - -### On Windows: - -Download and extract Fluent Bit from [Fluent Bit releases](https://fluentbit.io/download/). - -## Step 2: Create Fluent Bit Configuration Files - -#### Create `fluent-bit.conf` - -This is the main configuration file for Fluent Bit. It defines the input source, parser, and output destination. - -```ini -[SERVICE] - Flush 1 - Log_Level info - Parsers_File parsers.conf - -[INPUT] - Name tail - Path /path/to/your/csv/files/*.csv - Parser csv - Tag csv - Refresh_Interval 5 - Rotate_Wait 30 - -[OUTPUT] - Name opensearch - Match * - Host your-opensearch-host - Port 9200 - Index csv-index - HTTP_User your-username - HTTP_Passwd your-password - tls off - Suppress_Type_Name On - tls.verify off -``` - -### Create `parsers.conf` - -This file defines the CSV parser. - -```ini -[PARSER] - Name csv - Format regex - Regex ^(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+),(?[^,]+)$ - Time_Key timestamp - Time_Format %Y-%m-%d %H:%M:%S -``` - -### Direct the CSV folder location - -Ensure this file is in the directory you specified in the `Path` of the `fluent-bit.conf` file. - - -## Step 3: Run Fluent Bit - -Navigate to the directory containing the Fluent Bit executable and the configuration files. Then, start Fluent Bit with the configuration files. - -```bash -/path/to/fluent-bit/bin/fluent-bit -c /path/to/fluent-bit.conf -``` - -## Step 4: Verify Data in OpenSearch - -After starting Fluent Bit, you can verify the data ingestion by accessing OpenSearch and searching for the `csv-index` index. - -For example, you can use OpenSearch Dashboards or the OpenSearch API to query the index: - -### Using OpenSearch Dashboards: - -1. Open OpenSearch Dashboards in your browser. -2. Navigate to the "Discover" tab. -3. Select the `csv-index` index pattern. -4. Verify that the log data from your CSV files is being ingested and displayed. 
- -### Using the OpenSearch API: - -```bash -curl -X GET "http://your-opensearch-host:9200/csv-index/_search?pretty" -``` - ---- -## Live Testing with Docker Compose -If you prefer to test this setup using Docker Compose, you can use the following docker-compose.yml file to quickly set up an OpenSearch instance along with Fluent Bit: - -Under the `getting-started` section you can examine a live docker-compose sample: -```yaml -/csv_file/getting-started/fluent-bit -|-- docker-complete.yml -|-- data/ - |-- fluent-bit.conf - |-- parsers.conf - |-- logs.csv - -``` -Use the [docker-compose](../getting-started/docker-complete.yml) you can find a complete: - -`docker compose -f docker-complete.yml up -d` would instantiate the services and start sending the csv sample logs into an index. - ---- -# Data-Prepper CSV Processor Tutorial - -The `csv` processor parses comma-separated values (CSVs) from the event into columns. - -## Configuration Options - -- **source** (String): The field in the event to be parsed. Default is `message`. -- **quote_character** (String): The text qualifier for a single column. Default is `"`. -- **delimiter** (String): The character separating each column. Default is `,`. -- **delete_header** (Boolean): Deletes the event header after parsing. Default is true. -- **column_names_source_key** (String): Specifies the CSV column names. -- **column_names** (List): User-specified column names. - -## Usage Examples - -### User-specified Column Names - -```yaml -csv-pipeline: - source: - file: - path: "/full/path/to/ingest.csv" - record_type: "event" - processor: - - csv: - column_names: ["col1", "col2"] - sink: - - stdout: -``` - -### Automatically Detect Column Names - -```yaml -csv-s3-pipeline: - source: - s3: - notification_type: "sqs" - codec: - newline: - skip_lines: 1 - header_destination: "header" - compression: none - sqs: - queue_url: "https://sqs..amazonaws.com//" - aws: - region: "" - processor: - - csv: - column_names_source_key: "header" - sink: - - stdout: -``` - -## Metrics - -- **recordsIn**: Ingress records count. -- **recordsOut**: Egress records count. -- **timeElapsed**: Execution time. -- **csvInvalidEvents**: Count of invalid events. - -For more details, visit the [CSV Processor Documentation](https://opensearch.org/docs/latest/data-prepper/pipelines/configuration/processors/csv/). 
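The curl-based verification of `csv-index` shown above can also be scripted. A minimal sketch, assuming the `@opensearch-project/opensearch` JavaScript client and the tutorial's placeholder host and credentials:

```ts
import { Client } from '@opensearch-project/opensearch';

// Placeholder endpoint and credentials from the tutorial; the sample setup runs
// Fluent Bit with `tls off`, so plain HTTP is used here.
const client = new Client({
  node: 'http://your-opensearch-host:9200',
  auth: { username: 'your-username', password: 'your-password' },
});

async function verifyCsvIngestion(): Promise<void> {
  // Equivalent to: curl -X GET "http://your-opensearch-host:9200/csv-index/_search?pretty"
  const response = await client.search({
    index: 'csv-index',
    body: { query: { match_all: {} }, size: 5 },
  });
  console.log(JSON.stringify(response.body.hits, null, 2));
}

verifyCsvIngestion().catch(console.error);
```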
-``` \ No newline at end of file diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/schema/csv-logs-1.0.0.mapping.json b/public/components/getting_started/getting_started_artifacts/csv_file/schema/csv-logs-1.0.0.mapping.json deleted file mode 100644 index 72af9ec29..000000000 --- a/public/components/getting_started/getting_started_artifacts/csv_file/schema/csv-logs-1.0.0.mapping.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "index_patterns": ["logs-*"], - "template": { - "settings": { - "number_of_shards": 1, - "number_of_replicas": 1 - }, - "mappings": { - "properties": { - "timestamp": { - "type": "date", - "format": "yyyy-MM-dd HH:mm:ss" - }, - "log_level": { - "type": "keyword" - }, - "message": { - "type": "text" - }, - "application": { - "type": "keyword" - }, - "host": { - "type": "keyword" - } - } - } - } -} diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/static/data-pepper.png b/public/components/getting_started/getting_started_artifacts/csv_file/static/data-pepper.png deleted file mode 100644 index 9d6939709..000000000 Binary files a/public/components/getting_started/getting_started_artifacts/csv_file/static/data-pepper.png and /dev/null differ diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/static/fluent-bit-getting-started-dashboard.png b/public/components/getting_started/getting_started_artifacts/csv_file/static/fluent-bit-getting-started-dashboard.png deleted file mode 100644 index b3a87aee1..000000000 Binary files a/public/components/getting_started/getting_started_artifacts/csv_file/static/fluent-bit-getting-started-dashboard.png and /dev/null differ diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/static/fluentbit.png b/public/components/getting_started/getting_started_artifacts/csv_file/static/fluentbit.png deleted file mode 100644 index aae238e11..000000000 Binary files a/public/components/getting_started/getting_started_artifacts/csv_file/static/fluentbit.png and /dev/null differ diff --git a/public/components/getting_started/getting_started_artifacts/csv_file/static/logo.png b/public/components/getting_started/getting_started_artifacts/csv_file/static/logo.png deleted file mode 100644 index d9bf6b551..000000000 Binary files a/public/components/getting_started/getting_started_artifacts/csv_file/static/logo.png and /dev/null differ diff --git a/public/components/getting_started/getting_started_artifacts/golang_client/golang_client-1.0.0.json b/public/components/getting_started/getting_started_artifacts/golang_client/golang_client-1.0.0.json index d02782d9f..b67136daf 100644 --- a/public/components/getting_started/getting_started_artifacts/golang_client/golang_client-1.0.0.json +++ b/public/components/getting_started/getting_started_artifacts/golang_client/golang_client-1.0.0.json @@ -56,74 +56,7 @@ "live-sample": "true", "workflows": [ { - "name": "Quick Start", - "description": "This is a golang-client fluent-bit based docker quick starting instructions tutorial with a live example", - "steps": [ - { - "name": "Create docker-network", - "label": "Environment setting", - "phase": "docker", - "type": "console-cmd", - "content": "docker network create opensearch-net", - "description": "Before running any docker-compose files, create the opensearch-net network" - }, - { - "name": "Setup docker .env file", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "input-params": [ - { - "name": "OPENSEARCH_HOST", - 
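The removed `csv-logs-1.0.0.mapping.json` template above, like the `PUT _index_template` steps that the remaining tutorials keep, can also be applied programmatically. A minimal sketch, assuming the `@opensearch-project/opensearch` JavaScript client; the template name, endpoint, and credentials are illustrative placeholders:

```ts
import { Client } from '@opensearch-project/opensearch';

// Placeholder endpoint and credentials; adjust to your cluster.
const client = new Client({
  node: 'http://localhost:9200',
  auth: { username: 'admin', password: 'admin' },
});

async function putLogsTemplate(): Promise<void> {
  // Same settings and mappings as the csv-logs mapping template above;
  // the template name here is illustrative.
  await client.indices.putIndexTemplate({
    name: 'application_logs_template',
    body: {
      index_patterns: ['logs-*'],
      template: {
        settings: { number_of_shards: 1, number_of_replicas: 1 },
        mappings: {
          properties: {
            timestamp: { type: 'date', format: 'yyyy-MM-dd HH:mm:ss' },
            log_level: { type: 'keyword' },
            message: { type: 'text' },
            application: { type: 'keyword' },
            host: { type: 'keyword' },
          },
        },
      },
    },
  });
}

putLogsTemplate().catch(console.error);
```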
"description": "Opensearch host", - "type": "host-name" - }, - { - "name": "OPENSEARCH_DASHBOARD_HOST", - "description": "Opensearch Dashboard host", - "type": "host-name" - } - ], - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/.env"], - "content": "wget https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/.env", - "description": "Setup docker-compose env variables " - }, - { - "name": "Setup fluent-bit folder", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/fluent-bit/fluent-bit.conf", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/fluent-bit/parsers.conf" - ], - "content": "mkdir -p fluent-bit\nwget -P fluent-bit https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/fluent-bit/fluent-bit.conf \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/fluent-bit/parsers.conf\n", - "description": "Setup docker-compose fluent-bit's service configuration " - }, - { - "name": "Setup golang app folder", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/golang-app/Dockerfile", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/golang-app/go.mod", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/golang-app/main.go" - ], - "content": "mkdir -p golang-app\nwget -P golang-app https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/golang-app/Dockerfile \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/golang-app/go.mod \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/golang-app/main.go \n", - "description": "Setup docker-compose golan-app service configuration " - }, - { - "name": "Run docker-compose", - "label": "live container", - "type": "file-url", - "phase": "docker", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/getting-started/docker-compose.yml"], - "description": "Run docker-compose for the gloang-client live example and see the generated index", - "content": "wget -O docker-compose.yml https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/golang_client/getting-started/docker-compose.yml && docker compose up --build -d golang-app fluent-bit \n" - } - ] - }, - { - "name": "Go-client Setup", + "name": "Self managed", "description": "This is a Go-based getting started instructions tutorial", "steps": [ { @@ -221,9 +154,6 @@ "schema": [ { "type": "logs", - "info": [ - "https://opensearch.org/docs/latest/im-plugin/index-templates" - ], 
"content": "PUT _index_template/application_logs_template", "description": "Setup applicative logs index template in the dev console", "index-template": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/schemas/applicative-logs-1.0.0.mapping.json", @@ -235,7 +165,10 @@ "logs" ], "info": [ - "https://opensearch.org/docs/latest/dashboards/management/index-patterns/" + { + "url": "https://opensearch.org/docs/latest/dashboards/management/index-patterns/", + "title": "Index Pattern Management Guide" + } ], "description": "Import index patterns `.ndjson` file to the saved objects", "index-pattern": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/golang_client/assets/tutorial-1.0.0.ndjson", diff --git a/public/components/getting_started/getting_started_artifacts/golang_client/static/logo.svg b/public/components/getting_started/getting_started_artifacts/golang_client/static/logo.svg new file mode 100644 index 000000000..95702ed49 --- /dev/null +++ b/public/components/getting_started/getting_started_artifacts/golang_client/static/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/components/getting_started/getting_started_artifacts/java_client/java_client-1.0.0.json b/public/components/getting_started/getting_started_artifacts/java_client/java_client-1.0.0.json index 1db5a4682..f436e576f 100644 --- a/public/components/getting_started/getting_started_artifacts/java_client/java_client-1.0.0.json +++ b/public/components/getting_started/getting_started_artifacts/java_client/java_client-1.0.0.json @@ -42,74 +42,7 @@ "live-sample": "true", "workflows": [ { - "name": "Quick Start", - "description": "This is a java-client fluent-bit based docker quick starting instructions tutorial with a live example", - "steps": [ - { - "name": "Create docker-network", - "label": "Environment setting", - "phase": "docker", - "type": "console-cmd", - "content": "docker network create opensearch-net", - "description": "Before running any docker-compose files, create the opensearch-net network" - }, - { - "name": "Setup docker .env file", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "input-params": [ - { - "name": "OPENSEARCH_HOST", - "description": "Opensearch host", - "type": "host-name" - }, - { - "name": "OPENSEARCH_DASHBOARD_HOST", - "description": "Opensearch Dashboard host", - "type": "host-name" - } - ], - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/.env"], - "content": "wget https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/java_client/getting-started/.env", - "description": "Setup docker-compose env variables " - }, - { - "name": "Setup fluent-bit folder", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/fluent-bit/fluent-bit.conf", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/fluent-bit/parsers.conf" - ], - "content": "mkdir -p fluent-bit\nwget -P fluent-bit https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/java_client/getting-started/fluent-bit/fluent-bit.conf \\\n 
https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/fluent-bit/parsers.conf \n", - "description": "Setup docker-compose fluent-bit's service configuration " - }, - { - "name": "Setup java app folder", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/java-app/Dockerfile", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/java-app/src/main/java/App.java", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/java-app/pom.xml" - ], - "content": "mkdir -p java-app/src/main/java\n\nwget -P java-app \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/java_client/getting-started/java-app/Dockerfile\n\nwget -P java-app \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/java_client/getting-started/java-app/pom.xml\n\nwget -P java-app/src/main/java \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/java_client/getting-started/java-app/src/main/java/App.java\n", - "description": "Setup docker-compose java-app service configuration " - }, - { - "name": "Run docker-compose", - "label": "live container", - "type": "file-url", - "phase": "docker", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/getting-started/docker-compose.yml"], - "description": "Run docker-compose for the java-client live example and see the generated index", - "content": "wget -O docker-compose.yml https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/java_client/getting-started/docker-compose.yml && docker-compose up --build -d java-app fluent-bit \n" - } - ] - }, - { - "name": "java-client", + "name": "Self managed", "description": "This is a fluent-bit based getting started instructions tutorial", "steps": [ { @@ -175,7 +108,10 @@ "type": "source" } ], - "content": "String index = \"sample-index\";\nCreateIndexRequest createIndexRequest = new CreateIndexRequest.Builder().index(index).build();\nclient.indices().create(createIndexRequest);\n\nIndexSettings indexSettings = new IndexSettings.Builder().autoExpandReplicas(\"0-all\").build();\nPutIndicesSettingsRequest putIndicesSettingsRequest = new PutIndicesSettingsRequest.Builder().index(index).value(indexSettings).build();\nclient.indices().putSettings(putIndicesSettingsRequest);" + "content": "String index = \"sample-index\";\nCreateIndexRequest createIndexRequest = new CreateIndexRequest.Builder().index(index).build();\nclient.indices().create(createIndexRequest);\n\nIndexSettings indexSettings = new IndexSettings.Builder().autoExpandReplicas(\"0-all\").build();\nPutIndicesSettingsRequest putIndicesSettingsRequest = new PutIndicesSettingsRequest.Builder().index(index).value(indexSettings).build();\nclient.indices().putSettings(putIndicesSettingsRequest);", + "info": [ + { "url": "https://opensearch.org/docs/latest/logging/indexing", "title": "Log Indexing Documentation" } + ] } ] } @@ -183,9 +119,6 @@ "schema": [ { "type": "logs", - "info": [ - "https://opensearch.org/docs/latest/im-plugin/index-templates" - ], 
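A recurring change in these tutorial JSONs is that `info` references move from bare URL strings to `{ url, title }` objects, as in the index-pattern and logging documentation links above. A minimal sketch of the shape this implies; the `InfoLink` type and `toInfoLink` helper are illustrative names, not plugin API, and a consumer may still want to accept the older string form:

```ts
// Illustrative sketch of the new info entry shape introduced by this change.
interface InfoLink {
  url: string;
  title: string;
}

// Some steps may still carry bare URL strings, so accept both forms.
type InfoEntry = string | InfoLink;

function toInfoLink(entry: InfoEntry): InfoLink {
  return typeof entry === 'string' ? { url: entry, title: entry } : entry;
}

const entries: InfoEntry[] = [
  {
    url: 'https://opensearch.org/docs/latest/dashboards/management/index-patterns/',
    title: 'Index Pattern Management Guide',
  },
  'https://opensearch.org/docs/latest/im-plugin/index-templates',
];

console.log(entries.map(toInfoLink));
```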
"content": "PUT _index_template/application_logs_template", "description": "Setup applicative logs index template in the dev console", "index-template": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/schemas/applicative-logs-1.0.0.mapping.json", @@ -197,7 +130,7 @@ "logs" ], "info": [ - "https://opensearch.org/docs/latest/dashboards/management/index-patterns/" + { "url": "https://opensearch.org/docs/latest/dashboards/management/index-patterns/", "title": "Index Pattern Management Guide" } ], "description": "Import index patterns `.ndjson` file to the saved objects", "index-pattern": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/java_client/assets/tutorial-1.0.0.ndjson", diff --git a/public/components/getting_started/getting_started_artifacts/java_client/static/logo.svg b/public/components/getting_started/getting_started_artifacts/java_client/static/logo.svg new file mode 100644 index 000000000..a6410a04a --- /dev/null +++ b/public/components/getting_started/getting_started_artifacts/java_client/static/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/components/getting_started/getting_started_artifacts/nginx/nginx-1.0.0.json b/public/components/getting_started/getting_started_artifacts/nginx/nginx-1.0.0.json index d70847ff0..ed7dbfa40 100644 --- a/public/components/getting_started/getting_started_artifacts/nginx/nginx-1.0.0.json +++ b/public/components/getting_started/getting_started_artifacts/nginx/nginx-1.0.0.json @@ -108,7 +108,10 @@ } ], "getting-started": { - "info": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/Getting-Started.md", + "info": { + "url": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/Getting-Started.md", + "title": "Getting Started Guide" + }, "ingestion": ["fluent-bit"], "structured": "true", "technology": "nginx", @@ -116,63 +119,7 @@ "live-sample": "true", "workflows": [ { - "name": "Quick Start", - "description": "This is a fluent-bit based docker quick starting instructions tutorial with a live example", - "steps": [ - { - "name": "Create docker-network", - "label": "Environment setting", - "phase": "docker", - "type": "console-cmd", - "content": "docker network create opensearch-net", - "description": "Before running any docker-compose files, create the opensearch-net network" - }, - { - "name": "Setup docker .env file", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "input-params": [ - { - "name": "OPENSEARCH_HOST", - "description": "Opensearch host", - "type": "host-name" - }, - { - "name": "OPENSEARCH_DASHBOARD_HOST", - "description": "Opensearch Dashboard host", - "type": "host-name" - } - ], - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/.env"], - "content": "wget https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/nginx/getting-started/.env", - "description": "Setup docker-compose env variables " - }, - { - "name": "Setup fluent-bit folder", - "label": "Environment Parameters", - "phase": "docker", - "type": "file-url", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/fluent-bit/fluent-bit.conf", - 
"https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/fluent-bit/otel-converter.lua", - "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/fluent-bit/parsers.conf" - ], - "content": "mkdir -p fluent-bit\nwget -P fluent-bit https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/nginx/getting-started/fluent-bit/fluent-bit.conf \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/nginx/getting-started/fluent-bit/otel-converter.lua \\\n https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/nginx/getting-started/fluent-bit/parsers.conf\n", - "description": "Setup docker-compose fluent-bit's service configuration " - }, - { - "name": "Run docker-compose", - "label": "live container", - "type": "file-url", - "phase": "docker", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/nginx-node.yml"], - "description": "Run docker-compose for the nginx live example and see the generated index", - "content": "wget -O nginx-node.yml https://raw.githubusercontent.com/opensearch-project/opensearch-catalog/main/integrations/observability/nginx/getting-started/nginx-node.yml && docker-compose -f nginx-node.yml up -d\n" - } - ] - }, - { - "name": "Connect to Nginx", + "name": "Self managed", "description": "This is a fluent-bit based getting started instructions tutorial", "steps": [ { @@ -180,16 +127,26 @@ "type": "console-cmd", "phase": "ingestion", "label": "Log Parsing", - "info": ["https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/fluent-bit/parsers.conf"], + "info": [ + { + "url": "https://github.com/opensearch-project/opensearch-catalog/blob/main/integrations/observability/nginx/getting-started/fluent-bit/parsers.conf", + "title": "Fluent-Bit Parser Configuration" + } + ], "description": "Setup Fluent-Bit parser config file parsing Nginx access log fields", - "content": "[PARSER]\n Name apache\n Format regex\n Regex ^(?[^ ]*) [^ ]* (?[^ ]*) \\[(?