diff --git a/packages/activemq/_dev/deploy/variants.yml b/packages/activemq/_dev/deploy/variants.yml index 72ce4f1e0e8..01d8f85e581 100644 --- a/packages/activemq/_dev/deploy/variants.yml +++ b/packages/activemq/_dev/deploy/variants.yml @@ -1,4 +1,4 @@ variants: - v5.17.1: + "v5.17.1": SERVICE_VERSION: 5.17.1 default: v5.17.1 diff --git a/packages/activemq/changelog.yml b/packages/activemq/changelog.yml index 228703046ad..f8696b38c17 100644 --- a/packages/activemq/changelog.yml +++ b/packages/activemq/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: "0.14.0" + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "0.13.1" changes: - description: Remove forwarded tag from metrics data streams. diff --git a/packages/activemq/data_stream/audit/_dev/test/pipeline/test-audit.log-expected.json b/packages/activemq/data_stream/audit/_dev/test/pipeline/test-audit.log-expected.json index 49ee287dda8..8890a5894c2 100644 --- a/packages/activemq/data_stream/audit/_dev/test/pipeline/test-audit.log-expected.json +++ b/packages/activemq/data_stream/audit/_dev/test/pipeline/test-audit.log-expected.json @@ -11,7 +11,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.692324051Z", + "ingested": "2023-10-11T20:53:28.142058918Z", "kind": "event", "module": "activemq", "original": "INFO | anonymous called org.apache.activemq.broker.jmx.QueueView.retryMessages[] at 27-11-2019 08:45:57,213 | qtp443290224-47", @@ -41,7 +41,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.692336343Z", + "ingested": "2023-10-11T20:53:28.142069543Z", "kind": "event", "module": "activemq", "original": "INFO | admin called org.apache.activemq.broker.jmx.QueueView.retryMessages[] at 27-11-2019 08:45:57,229 | qtp443290224-45", @@ -71,7 +71,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.692337760Z", + "ingested": 
"2023-10-11T20:53:28.142070835Z", "kind": "event", "module": "activemq", "original": "WARN | admin requested /admin/createDestination.action [JMSDestination='test' JMSDestinationType='queue' secret='4eb0bc3e-9d7a-4256-844c-24f40fda98f1' ] from 127.0.0.1 | qtp12205619-39", @@ -101,7 +101,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.692338926Z", + "ingested": "2023-10-11T20:53:28.142071877Z", "kind": "event", "module": "activemq", "original": "INFO | guest requested /admin/purgeDestination.action [JMSDestination='test' JMSDestinationType='queue' secret='eff6a932-1b58-45da-a64a-1b30b246cfc9' ] from 127.0.0.1 | qtp12205619-36", diff --git a/packages/activemq/data_stream/audit/_dev/test/pipeline/test-common-config.yml b/packages/activemq/data_stream/audit/_dev/test/pipeline/test-common-config.yml index 3cabcf9fb82..f54a125abfd 100644 --- a/packages/activemq/data_stream/audit/_dev/test/pipeline/test-common-config.yml +++ b/packages/activemq/data_stream/audit/_dev/test/pipeline/test-common-config.yml @@ -1,5 +1,5 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" fields: "@timestamp": "2020-04-28T11:07:58.223Z" tags: diff --git a/packages/activemq/data_stream/log/_dev/test/pipeline/test-activemq.log-expected.json b/packages/activemq/data_stream/log/_dev/test/pipeline/test-activemq.log-expected.json index 873ebe2d2c2..670da8dd445 100644 --- a/packages/activemq/data_stream/log/_dev/test/pipeline/test-activemq.log-expected.json +++ b/packages/activemq/data_stream/log/_dev/test/pipeline/test-activemq.log-expected.json @@ -12,7 +12,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818887635Z", + "ingested": "2023-10-11T20:53:28.273182960Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,491 | INFO | KahaDB is version 6 | org.apache.activemq.store.kahadb.MessageDatabase | main", @@ -40,7 +40,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818898093Z", + 
"ingested": "2023-10-11T20:53:28.273190460Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,531 | INFO | PListStore:[/opt/activemq/data/localhost/tmp_storage] started | org.apache.activemq.store.kahadb.plist.PListStoreImpl | main", @@ -68,7 +68,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818899385Z", + "ingested": "2023-10-11T20:53:28.273191668Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,538 | INFO | Page File: /opt/activemq/data/kahadb/db.data. Recovered pageFile free list of size: 0 | org.apache.activemq.store.kahadb.disk.page.PageFile | KahaDB Index Free Page Recovery", @@ -96,7 +96,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818900385Z", + "ingested": "2023-10-11T20:53:28.273192710Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,690 | INFO | Apache ActiveMQ 5.15.9 (localhost, ID:5338986a6080-37033-1574867374550-0:1) is starting | org.apache.activemq.broker.BrokerService | main", @@ -127,7 +127,7 @@ "stack_trace": "at org.apache.activemq.util.IOExceptionSupport.create(IOExceptionSupport.java:28)[activemq-client-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.registerConnectorMBean(BrokerService.java:2264)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.startTransportConnector(BrokerService.java:2744)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.startAllConnectors(BrokerService.java:2640)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.doStartBroker(BrokerService.java:771)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.startBroker(BrokerService.java:733)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.start(BrokerService.java:636)[activemq-broker-5.15.9.jar:5.15.9]\n\tat 
org.apache.activemq.xbean.XBeanBrokerService.afterPropertiesSet(XBeanBrokerService.java:73)[activemq-spring-5.15.9.jar:5.15.9]\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)[:1.8.0_212]\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)[:1.8.0_212]\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)[:1.8.0_212]\n\tat java.lang.reflect.Method.invoke(Method.java:498)[:1.8.0_212]\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeCustomInitMethod(AbstractAutowireCapableBeanFactory.java:1763)[spring-beans-4.3.18.RELEASE.jar:4.3.18.RELEASE]\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1700)[spring-beans-4.3.18.RELEASE.jar:4.3.18.RELEASE]" }, "event": { - "ingested": "2022-12-08T15:06:10.818901468Z", + "ingested": "2023-10-11T20:53:28.273193668Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,712 | ERROR | Failed to start Apache ActiveMQ (localhost, ID:5338986a6080-37033-1574867374550-0:1) | org.apache.activemq.broker.BrokerService | main\n\tat org.apache.activemq.util.IOExceptionSupport.create(IOExceptionSupport.java:28)[activemq-client-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.registerConnectorMBean(BrokerService.java:2264)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.startTransportConnector(BrokerService.java:2744)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.startAllConnectors(BrokerService.java:2640)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.doStartBroker(BrokerService.java:771)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.broker.BrokerService.startBroker(BrokerService.java:733)[activemq-broker-5.15.9.jar:5.15.9]\n\tat 
org.apache.activemq.broker.BrokerService.start(BrokerService.java:636)[activemq-broker-5.15.9.jar:5.15.9]\n\tat org.apache.activemq.xbean.XBeanBrokerService.afterPropertiesSet(XBeanBrokerService.java:73)[activemq-spring-5.15.9.jar:5.15.9]\n\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)[:1.8.0_212]\n\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)[:1.8.0_212]\n\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)[:1.8.0_212]\n\tat java.lang.reflect.Method.invoke(Method.java:498)[:1.8.0_212]\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeCustomInitMethod(AbstractAutowireCapableBeanFactory.java:1763)[spring-beans-4.3.18.RELEASE.jar:4.3.18.RELEASE]\n\tat org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1700)[spring-beans-4.3.18.RELEASE.jar:4.3.18.RELEASE]", @@ -155,7 +155,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818902343Z", + "ingested": "2023-10-11T20:53:28.273194668Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,716 | INFO | Apache ActiveMQ 5.15.9 (localhost, ID:5338986a6080-37033-1574867374550-0:1) is shutting down | org.apache.activemq.broker.BrokerService | main", @@ -183,7 +183,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818903260Z", + "ingested": "2023-10-11T20:53:28.273195627Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,718 | INFO | Connector openwire stopped | org.apache.activemq.broker.TransportConnector | main", @@ -211,7 +211,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818904218Z", + "ingested": "2023-10-11T20:53:28.273196543Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,719 | INFO | Connector amqp stopped | org.apache.activemq.broker.TransportConnector | main", @@ 
-239,7 +239,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818905135Z", + "ingested": "2023-10-11T20:53:28.273197418Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,721 | INFO | Connector stomp stopped | org.apache.activemq.broker.TransportConnector | main", @@ -267,7 +267,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818906093Z", + "ingested": "2023-10-11T20:53:28.273200043Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,722 | INFO | Connector mqtt stopped | org.apache.activemq.broker.TransportConnector | main", @@ -295,7 +295,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818907051Z", + "ingested": "2023-10-11T20:53:28.273200960Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,723 | INFO | Connector ws stopped | org.apache.activemq.broker.TransportConnector | main", @@ -323,7 +323,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818908260Z", + "ingested": "2023-10-11T20:53:28.273201918Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,727 | INFO | PListStore:[/opt/activemq/data/localhost/tmp_storage] stopped | org.apache.activemq.store.kahadb.plist.PListStoreImpl | main", @@ -351,7 +351,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818909218Z", + "ingested": "2023-10-11T20:53:28.273202918Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,728 | INFO | Stopping async queue tasks | org.apache.activemq.store.kahadb.KahaDBStore | main", @@ -379,7 +379,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818910135Z", + "ingested": "2023-10-11T20:53:28.273203793Z", "kind": "event", "module": "activemq", "original": "2019-11-27 15:09:34,730 | INFO | Stopping async topic tasks | org.apache.activemq.store.kahadb.KahaDBStore | main", @@ -407,7 +407,7 @@ "version": "8.5.1" }, "event": { - "ingested": 
"2022-12-08T15:06:10.818911010Z", + "ingested": "2023-10-11T20:53:28.273204710Z", "kind": "event", "module": "activemq", "original": "2019-11-29 10:59:49,515 | INFO | No Spring WebApplicationInitializer types detected on classpath | /admin | main", @@ -435,7 +435,7 @@ "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818911885Z", + "ingested": "2023-10-11T20:53:28.273205668Z", "kind": "event", "module": "activemq", "original": "2019-11-29 10:59:49,779 | INFO | Initializing Spring FrameworkServlet 'dispatcher' | /admin | main", @@ -456,17 +456,17 @@ "activemq": { "log": { "caller": "org.apache.activemq.xbean.XBeanBrokerFactory", - "thread": "main org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-\u003e Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:308)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:561)\\n\\tat org.apache.xbean.spring.context.v2c.XBeanBeanDefinitionParserDelegate.parseBeanDefinitionElement(XBeanBeanDefinitionParserDelegate.java:58)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:459)\\n\\tat 
org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:428)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.processBeanDefinition(XBeanBeanDefinitionDocumentReader.java:188)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:115)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.importBeanDefinitionResource(XBeanBeanDefinitionDocumentReader.java:143)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:109)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat 
org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:116)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:104)\\n\\tat org.springframework.context.support.AbstractRefreshableApplicationContext.refreshBeanFactory(AbstractRefreshableApplicationContext.java:126)\\n\\tat org.springframework.context.support.AbstractApplicationContext.obtainFreshBeanFactory(AbstractApplicationContext.java:614)\\n\\tat org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:514)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.\u003cinit\u003e(ResourceXmlApplicationContext.java:64)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.\u003cinit\u003e(ResourceXmlApplicationContext.java:52)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory$1.\u003cinit\u003e(XBeanBrokerFactory.java:104)\\n\\tat 
org.apache.activemq.xbean.XBeanBrokerFactory.createApplicationContext(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createBroker(XBeanBrokerFactory.java:67)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:71)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:54)\\n\\tat org.apache.activemq.console.command.StartCommand.runTask(StartCommand.java:87)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.runTask(ShellCommand.java:154)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.main(ShellCommand.java:104)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\\n\\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\\n\\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\\n\\tat org.apache.activemq.console.Main.runTaskClass(Main.java:262)\\n\\tat org.apache.activemq.console.Main.main(Main.java:115)\\nCaused by: org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-\u003e Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:110)\\n\\tat 
org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:301)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElement(BeanDefinitionParserDelegate.java:897)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElements(BeanDefinitionParserDelegate.java:761)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:546)\\n\\t... 46 more" + "thread": "main org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-> Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:308)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:561)\\n\\tat org.apache.xbean.spring.context.v2c.XBeanBeanDefinitionParserDelegate.parseBeanDefinitionElement(XBeanBeanDefinitionParserDelegate.java:58)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:459)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:428)\\n\\tat 
org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.processBeanDefinition(XBeanBeanDefinitionDocumentReader.java:188)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:115)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.importBeanDefinitionResource(XBeanBeanDefinitionDocumentReader.java:143)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:109)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat 
org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:116)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:104)\\n\\tat org.springframework.context.support.AbstractRefreshableApplicationContext.refreshBeanFactory(AbstractRefreshableApplicationContext.java:126)\\n\\tat org.springframework.context.support.AbstractApplicationContext.obtainFreshBeanFactory(AbstractApplicationContext.java:614)\\n\\tat org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:514)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.(ResourceXmlApplicationContext.java:64)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.(ResourceXmlApplicationContext.java:52)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory$1.(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createApplicationContext(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createBroker(XBeanBrokerFactory.java:67)\\n\\tat 
org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:71)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:54)\\n\\tat org.apache.activemq.console.command.StartCommand.runTask(StartCommand.java:87)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.runTask(ShellCommand.java:154)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.main(ShellCommand.java:104)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\\n\\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\\n\\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\\n\\tat org.apache.activemq.console.Main.runTaskClass(Main.java:262)\\n\\tat org.apache.activemq.console.Main.main(Main.java:115)\\nCaused by: org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-> Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:110)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:301)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElement(BeanDefinitionParserDelegate.java:897)\\n\\tat 
org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElements(BeanDefinitionParserDelegate.java:761)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:546)\\n\\t... 46 more" } }, "ecs": { "version": "8.5.1" }, "event": { - "ingested": "2022-12-08T15:06:10.818913051Z", + "ingested": "2023-10-11T20:53:28.273206960Z", "kind": "event", "module": "activemq", - "original": "2022-06-17 12:19:13,443 | ERROR | Failed to load: class path resource [activemq.xml], reason: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-\u003e Property 'host' | org.apache.activemq.xbean.XBeanBrokerFactory | main org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-\u003e Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:308)\\n\\tat 
org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:561)\\n\\tat org.apache.xbean.spring.context.v2c.XBeanBeanDefinitionParserDelegate.parseBeanDefinitionElement(XBeanBeanDefinitionParserDelegate.java:58)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:459)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:428)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.processBeanDefinition(XBeanBeanDefinitionDocumentReader.java:188)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:115)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat 
org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.importBeanDefinitionResource(XBeanBeanDefinitionDocumentReader.java:143)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:109)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:116)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:104)\\n\\tat org.springframework.context.support.AbstractRefreshableApplicationContext.refreshBeanFactory(AbstractRefreshableApplicationContext.java:126)\\n\\tat org.springframework.context.support.AbstractApplicationContext.obtainFreshBeanFactory(AbstractApplicationContext.java:614)\\n\\tat 
org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:514)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.\u003cinit\u003e(ResourceXmlApplicationContext.java:64)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.\u003cinit\u003e(ResourceXmlApplicationContext.java:52)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory$1.\u003cinit\u003e(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createApplicationContext(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createBroker(XBeanBrokerFactory.java:67)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:71)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:54)\\n\\tat org.apache.activemq.console.command.StartCommand.runTask(StartCommand.java:87)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.runTask(ShellCommand.java:154)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.main(ShellCommand.java:104)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\\n\\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\\n\\tat java.base/java.lang.reflect.Method.invoke(Method.java:566)\\n\\tat org.apache.activemq.console.Main.runTaskClass(Main.java:262)\\n\\tat org.apache.activemq.console.Main.main(Main.java:115)\\nCaused by: org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource 
[jetty.xml]\\nBean 'jettyPort'\\n\\t-\u003e Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:110)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:301)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElement(BeanDefinitionParserDelegate.java:897)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElements(BeanDefinitionParserDelegate.java:761)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:546)\\n\\t... 46 more", + "original": "2022-06-17 12:19:13,443 | ERROR | Failed to load: class path resource [activemq.xml], reason: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-> Property 'host' | org.apache.activemq.xbean.XBeanBrokerFactory | main org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-> Property 'host'\\n\\tat 
org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:308)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:561)\\n\\tat org.apache.xbean.spring.context.v2c.XBeanBeanDefinitionParserDelegate.parseBeanDefinitionElement(XBeanBeanDefinitionParserDelegate.java:58)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:459)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:428)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.processBeanDefinition(XBeanBeanDefinitionDocumentReader.java:188)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:115)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat 
org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.importBeanDefinitionResource(XBeanBeanDefinitionDocumentReader.java:143)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseDefaultElement(XBeanBeanDefinitionDocumentReader.java:109)\\n\\tat org.apache.xbean.spring.context.v2.XBeanBeanDefinitionDocumentReader.parseBeanDefinitions(XBeanBeanDefinitionDocumentReader.java:95)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:142)\\n\\tat org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:94)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:508)\\n\\tat org.apache.xbean.spring.context.v2.XBeanXmlBeanDefinitionReader.registerBeanDefinitions(XBeanXmlBeanDefinitionReader.java:79)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)\\n\\tat org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:116)\\n\\tat 
org.apache.xbean.spring.context.ResourceXmlApplicationContext.loadBeanDefinitions(ResourceXmlApplicationContext.java:104)\\n\\tat org.springframework.context.support.AbstractRefreshableApplicationContext.refreshBeanFactory(AbstractRefreshableApplicationContext.java:126)\\n\\tat org.springframework.context.support.AbstractApplicationContext.obtainFreshBeanFactory(AbstractApplicationContext.java:614)\\n\\tat org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:514)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.(ResourceXmlApplicationContext.java:64)\\n\\tat org.apache.xbean.spring.context.ResourceXmlApplicationContext.(ResourceXmlApplicationContext.java:52)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory$1.(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createApplicationContext(XBeanBrokerFactory.java:104)\\n\\tat org.apache.activemq.xbean.XBeanBrokerFactory.createBroker(XBeanBrokerFactory.java:67)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:71)\\n\\tat org.apache.activemq.broker.BrokerFactory.createBroker(BrokerFactory.java:54)\\n\\tat org.apache.activemq.console.command.StartCommand.runTask(StartCommand.java:87)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.runTask(ShellCommand.java:154)\\n\\tat org.apache.activemq.console.command.AbstractCommand.execute(AbstractCommand.java:63)\\n\\tat org.apache.activemq.console.command.ShellCommand.main(ShellCommand.java:104)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\\n\\tat java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\\n\\tat java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\\n\\tat 
java.base/java.lang.reflect.Method.invoke(Method.java:566)\\n\\tat org.apache.activemq.console.Main.runTaskClass(Main.java:262)\\n\\tat org.apache.activemq.console.Main.main(Main.java:115)\\nCaused by: org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-> Property 'host'\\n\\tat org.springframework.beans.factory.parsing.FailFastProblemReporter.error(FailFastProblemReporter.java:70)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:118)\\n\\tat org.springframework.beans.factory.parsing.ReaderContext.error(ReaderContext.java:110)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.error(BeanDefinitionParserDelegate.java:301)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElement(BeanDefinitionParserDelegate.java:897)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parsePropertyElements(BeanDefinitionParserDelegate.java:761)\\n\\tat org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseBeanDefinitionElement(BeanDefinitionParserDelegate.java:546)\\n\\t... 
46 more", "type": [ "error" ] @@ -474,7 +474,7 @@ "log": { "level": "ERROR" }, - "message": "Failed to load: class path resource [activemq.xml], reason: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-\u003e Property 'host'", + "message": "Failed to load: class path resource [activemq.xml], reason: Configuration problem: Unexpected failure during bean definition parsing\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'; nested exception is org.springframework.beans.factory.parsing.BeanDefinitionParsingException: Configuration problem: Multiple 'property' definitions for property 'host'\\nOffending resource: class path resource [jetty.xml]\\nBean 'jettyPort'\\n\\t-> Property 'host'", "tags": [ "preserve_original_event" ] diff --git a/packages/activemq/data_stream/log/_dev/test/pipeline/test-common-config.yml b/packages/activemq/data_stream/log/_dev/test/pipeline/test-common-config.yml index 4f4badc2826..0b52372cdaa 100644 --- a/packages/activemq/data_stream/log/_dev/test/pipeline/test-common-config.yml +++ b/packages/activemq/data_stream/log/_dev/test/pipeline/test-common-config.yml @@ -1,5 +1,5 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" fields: "@timestamp": "2020-04-28T11:07:58.223Z" tags: diff --git a/packages/activemq/data_stream/topic/fields/ecs.yml b/packages/activemq/data_stream/topic/fields/ecs.yml index 40753d80057..20fc8ab532e 100644 --- a/packages/activemq/data_stream/topic/fields/ecs.yml +++ b/packages/activemq/data_stream/topic/fields/ecs.yml @@ -23,8 +23,6 @@ name: service.type - external: ecs name: tags -- external: ecs - name: tags - external: ecs name: agent.id 
dimension: true diff --git a/packages/activemq/manifest.yml b/packages/activemq/manifest.yml index 16814d5e4cf..2787313288c 100644 --- a/packages/activemq/manifest.yml +++ b/packages/activemq/manifest.yml @@ -1,6 +1,6 @@ name: activemq title: ActiveMQ -version: "0.13.1" +version: "0.14.0" description: Collect logs and metrics from ActiveMQ instances with Elastic Agent. type: integration icons: @@ -8,13 +8,15 @@ icons: title: activemq size: 32x32 type: image/svg+xml -format_version: 1.0.0 -license: basic +format_version: "3.0.0" categories: - message_queue - observability conditions: - kibana.version: ^8.8.0 + kibana: + version: ^8.8.0 + elastic: + subscription: basic screenshots: - src: /img/filebeat-activemq-log.png title: filebeat ActiveMQ application events @@ -116,3 +118,4 @@ policy_templates: description: Collecting broker, queue and topic metrics from ActiveMQ instances. owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/activemq/validation.yml b/packages/activemq/validation.yml new file mode 100644 index 00000000000..efdb1de132d --- /dev/null +++ b/packages/activemq/validation.yml @@ -0,0 +1,4 @@ +errors: + exclude_checks: + - SVR00004 + - SVR00002 diff --git a/packages/airflow/changelog.yml b/packages/airflow/changelog.yml index 7025ab3d3de..aee816f9d4c 100644 --- a/packages/airflow/changelog.yml +++ b/packages/airflow/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 0.5.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "0.4.0" changes: - description: Enable time series data streams for the metrics datasets. This dramatically reduces storage for metrics and is expected to progressively improve query performance. For more details, see https://www.elastic.co/guide/en/elasticsearch/reference/current/tsds.html. 
diff --git a/packages/airflow/data_stream/statsd/fields/agent.yml b/packages/airflow/data_stream/statsd/fields/agent.yml index d00130ad600..4ee28c0d22f 100644 --- a/packages/airflow/data_stream/statsd/fields/agent.yml +++ b/packages/airflow/data_stream/statsd/fields/agent.yml @@ -57,33 +57,6 @@ - name: image.id type: keyword description: Image ID for the cloud instance. -- name: container - title: Container - group: 2 - description: "Container fields are used for meta information about the specific container that is the source of information.\nThese fields help correlate data based containers from any runtime." - type: group - fields: - - name: id - level: core - type: keyword - ignore_above: 1024 - description: Unique container id. - dimension: true - - name: image.name - level: extended - type: keyword - ignore_above: 1024 - description: Name of the image the container was built on. - - name: labels - level: extended - type: object - object_type: keyword - description: Image labels. - - name: name - level: extended - type: keyword - ignore_above: 1024 - description: Container name. - name: host title: Host group: 2 diff --git a/packages/airflow/data_stream/statsd/fields/ecs.yml b/packages/airflow/data_stream/statsd/fields/ecs.yml index 9175f0f0c13..280c4a8008b 100644 --- a/packages/airflow/data_stream/statsd/fields/ecs.yml +++ b/packages/airflow/data_stream/statsd/fields/ecs.yml @@ -14,5 +14,7 @@ external: ecs - name: container.image.name external: ecs +- name: container.labels + external: ecs - name: host external: ecs diff --git a/packages/airflow/manifest.yml b/packages/airflow/manifest.yml index 48136adec8e..81bf1b7be2e 100644 --- a/packages/airflow/manifest.yml +++ b/packages/airflow/manifest.yml @@ -1,14 +1,16 @@ name: airflow title: Airflow -version: "0.4.0" +version: "0.5.0" description: Airflow Integration. 
type: integration -format_version: 1.0.0 -license: basic +format_version: "3.0.0" categories: - observability conditions: - kibana.version: "^8.9.0" + kibana: + version: "^8.9.0" + elastic: + subscription: basic icons: - src: /img/airflow.svg title: Airflow logo @@ -42,3 +44,4 @@ policy_templates: input_group: metrics owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/airflow/validation.yml b/packages/airflow/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/airflow/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002 diff --git a/packages/apache_spark/_dev/deploy/variants.yml b/packages/apache_spark/_dev/deploy/variants.yml index def7f20eff0..c237322a6aa 100644 --- a/packages/apache_spark/_dev/deploy/variants.yml +++ b/packages/apache_spark/_dev/deploy/variants.yml @@ -1,4 +1,4 @@ variants: - v3.2.0: + "v3.2.0": SERVICE_VERSION: 3.2.0 default: v3.2.0 diff --git a/packages/apache_spark/changelog.yml b/packages/apache_spark/changelog.yml index 83f9c55a84a..18641eb62e2 100644 --- a/packages/apache_spark/changelog.yml +++ b/packages/apache_spark/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: "0.8.0" + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "0.7.9" changes: - description: Add filters in visualizations. 
diff --git a/packages/apache_spark/data_stream/application/elasticsearch/ingest_pipeline/default.yml b/packages/apache_spark/data_stream/application/elasticsearch/ingest_pipeline/default.yml index 1501db54da7..814eebda530 100644 --- a/packages/apache_spark/data_stream/application/elasticsearch/ingest_pipeline/default.yml +++ b/packages/apache_spark/data_stream/application/elasticsearch/ingest_pipeline/default.yml @@ -10,7 +10,7 @@ processors: ignore_missing: true - set: field: event.type - value: info + value: [info] - set: field: event.kind value: metric diff --git a/packages/apache_spark/data_stream/application/sample_event.json b/packages/apache_spark/data_stream/application/sample_event.json index 76f3234c2a9..99cb29530cb 100644 --- a/packages/apache_spark/data_stream/application/sample_event.json +++ b/packages/apache_spark/data_stream/application/sample_event.json @@ -34,14 +34,18 @@ "ingested": "2023-09-28T09:24:37Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", "containerized": true, "hostname": "docker-fleet-agent", "id": "e8978f2086c14e13b7a0af9ed0011d19", - "ip": "172.20.0.7", + "ip": [ + "172.20.0.7" + ], "mac": "02-42-AC-14-00-07", "name": "docker-fleet-agent", "os": { diff --git a/packages/apache_spark/data_stream/driver/elasticsearch/ingest_pipeline/default.yml b/packages/apache_spark/data_stream/driver/elasticsearch/ingest_pipeline/default.yml index cb9ddf27e75..72e6ce75abf 100644 --- a/packages/apache_spark/data_stream/driver/elasticsearch/ingest_pipeline/default.yml +++ b/packages/apache_spark/data_stream/driver/elasticsearch/ingest_pipeline/default.yml @@ -10,7 +10,7 @@ processors: ignore_missing: true - set: field: event.type - value: info + value: [info] - set: field: event.kind value: metric diff --git a/packages/apache_spark/data_stream/driver/sample_event.json b/packages/apache_spark/data_stream/driver/sample_event.json index 28c6d9c6359..0e061376a57 100644 --- 
a/packages/apache_spark/data_stream/driver/sample_event.json +++ b/packages/apache_spark/data_stream/driver/sample_event.json @@ -38,14 +38,18 @@ "ingested": "2023-09-29T12:04:41Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", "containerized": true, "hostname": "docker-fleet-agent", "id": "e8978f2086c14e13b7a0af9ed0011d19", - "ip": "172.26.0.7", + "ip": [ + "172.26.0.7" + ], "mac": "02-42-AC-1A-00-07", "name": "docker-fleet-agent", "os": { diff --git a/packages/apache_spark/data_stream/executor/elasticsearch/ingest_pipeline/default.yml b/packages/apache_spark/data_stream/executor/elasticsearch/ingest_pipeline/default.yml index a03357e8403..0f7a21016aa 100644 --- a/packages/apache_spark/data_stream/executor/elasticsearch/ingest_pipeline/default.yml +++ b/packages/apache_spark/data_stream/executor/elasticsearch/ingest_pipeline/default.yml @@ -10,7 +10,7 @@ processors: ignore_missing: true - set: field: event.type - value: info + value: [info] - set: field: event.kind value: metric diff --git a/packages/apache_spark/data_stream/executor/sample_event.json b/packages/apache_spark/data_stream/executor/sample_event.json index fb4ee74d40f..69a71568d11 100644 --- a/packages/apache_spark/data_stream/executor/sample_event.json +++ b/packages/apache_spark/data_stream/executor/sample_event.json @@ -37,14 +37,18 @@ "ingested": "2023-09-28T09:26:49Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", "containerized": true, "hostname": "docker-fleet-agent", "id": "e8978f2086c14e13b7a0af9ed0011d19", - "ip": "172.20.0.7", + "ip": [ + "172.20.0.7" + ], "mac": "02-42-AC-14-00-07", "name": "docker-fleet-agent", "os": { diff --git a/packages/apache_spark/data_stream/node/elasticsearch/ingest_pipeline/default.yml b/packages/apache_spark/data_stream/node/elasticsearch/ingest_pipeline/default.yml index b2a7cf86a9e..bfb92a4c7bd 100644 --- 
a/packages/apache_spark/data_stream/node/elasticsearch/ingest_pipeline/default.yml +++ b/packages/apache_spark/data_stream/node/elasticsearch/ingest_pipeline/default.yml @@ -10,7 +10,7 @@ processors: ignore_missing: true - set: field: event.type - value: info + value: [info] - set: field: event.kind value: metric diff --git a/packages/apache_spark/data_stream/node/sample_event.json b/packages/apache_spark/data_stream/node/sample_event.json index f3cc37d90e6..e9b5798cb03 100644 --- a/packages/apache_spark/data_stream/node/sample_event.json +++ b/packages/apache_spark/data_stream/node/sample_event.json @@ -41,7 +41,9 @@ "ingested": "2022-04-12T04:42:53Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", diff --git a/packages/apache_spark/docs/README.md b/packages/apache_spark/docs/README.md index b45dc64b850..9d9f86839dd 100644 --- a/packages/apache_spark/docs/README.md +++ b/packages/apache_spark/docs/README.md @@ -112,14 +112,18 @@ An example event for `application` looks as following: "ingested": "2023-09-28T09:24:37Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", "containerized": true, "hostname": "docker-fleet-agent", "id": "e8978f2086c14e13b7a0af9ed0011d19", - "ip": "172.20.0.7", + "ip": [ + "172.20.0.7" + ], "mac": "02-42-AC-14-00-07", "name": "docker-fleet-agent", "os": { @@ -223,14 +227,18 @@ An example event for `driver` looks as following: "ingested": "2023-09-29T12:04:41Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", "containerized": true, "hostname": "docker-fleet-agent", "id": "e8978f2086c14e13b7a0af9ed0011d19", - "ip": "172.26.0.7", + "ip": [ + "172.26.0.7" + ], "mac": "02-42-AC-1A-00-07", "name": "docker-fleet-agent", "os": { @@ -400,14 +408,18 @@ An example event for `executor` looks as following: "ingested": 
"2023-09-28T09:26:49Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", "containerized": true, "hostname": "docker-fleet-agent", "id": "e8978f2086c14e13b7a0af9ed0011d19", - "ip": "172.20.0.7", + "ip": [ + "172.20.0.7" + ], "mac": "02-42-AC-14-00-07", "name": "docker-fleet-agent", "os": { @@ -579,7 +591,9 @@ An example event for `node` looks as following: "ingested": "2022-04-12T04:42:53Z", "kind": "metric", "module": "apache_spark", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", diff --git a/packages/apache_spark/manifest.yml b/packages/apache_spark/manifest.yml index 71e2aa5f1b3..4bf000285ea 100644 --- a/packages/apache_spark/manifest.yml +++ b/packages/apache_spark/manifest.yml @@ -1,15 +1,17 @@ -format_version: 1.0.0 +format_version: "3.0.0" name: apache_spark title: Apache Spark -version: "0.7.9" -license: basic +version: "0.8.0" description: Collect metrics from Apache Spark with Elastic Agent. type: integration categories: - observability - analytics_engine conditions: - kibana.version: ^8.8.0 + kibana: + version: ^8.8.0 + elastic: + subscription: basic screenshots: - src: /img/apache_spark-screenshot.png title: Apache Spark screenshot @@ -30,3 +32,4 @@ policy_templates: description: Collecting metrics from Apache Spark. 
owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/apache_spark/validation.yml b/packages/apache_spark/validation.yml new file mode 100644 index 00000000000..efdb1de132d --- /dev/null +++ b/packages/apache_spark/validation.yml @@ -0,0 +1,4 @@ +errors: + exclude_checks: + - SVR00004 + - SVR00002 diff --git a/packages/azure_functions/changelog.yml b/packages/azure_functions/changelog.yml index b32204fde0b..032c6388a9a 100644 --- a/packages/azure_functions/changelog.yml +++ b/packages/azure_functions/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 0.2.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "0.1.0" changes: - description: Add Azure Functions metrics data stream diff --git a/packages/azure_functions/data_stream/functionapplogs/_dev/test/pipeline/test-azure-functions-error-raw.log-expected.json b/packages/azure_functions/data_stream/functionapplogs/_dev/test/pipeline/test-azure-functions-error-raw.log-expected.json index 6e6ff04875e..8594a49007a 100644 --- a/packages/azure_functions/data_stream/functionapplogs/_dev/test/pipeline/test-azure-functions-error-raw.log-expected.json +++ b/packages/azure_functions/data_stream/functionapplogs/_dev/test/pipeline/test-azure-functions-error-raw.log-expected.json @@ -9,7 +9,7 @@ "category": "Function.hello", "event_id": 3, "event_name": "FunctionCompleted", - "exception_details": "Microsoft.Azure.WebJobs.Host.FunctionInvocationException : Exception while executing function: Functions.hello ---\u003e Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException : Result: Failure\nException: Exception: Ka-booom!\nStack: File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\", line 479, in _handle__invocation_request\n call_result = await self._loop.run_in_executor(\n File 
\"/usr/local/lib/python3.9/concurrent/futures/thread.py\", line 58, in run\n result = self.fn(*self.args, **self.kwargs)\n File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\", line 752, in _run_sync_func\n return ExtensionManager.get_sync_invocation_wrapper(context,\n File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\", line 215, in _raw_invocation_wrapper\n result = function(**args)\n File \"/home/site/wwwroot/hello/__init__.py\", line 22, in main\n raise Exception(\"Ka-booom!\")\n\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Script.Description.WorkerFunctionInvoker.InvokeCore(Object[] parameters,FunctionInvocationContext context) at /src/azure-functions-host/src/WebJobs.Script/Description/Workers/WorkerFunctionInvoker.cs : 101\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionInvokerBase.Invoke(Object[] parameters) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionInvokerBase.cs : 82\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionGenerator.Coerce[T](Task`1 src) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionGenerator.cs : 225\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionInvoker`2.InvokeAsync[TReflected,TReturnValue](Object instance,Object[] arguments) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionInvoker.cs : 52\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.InvokeWithTimeoutAsync(IFunctionInvoker invoker,ParameterHelper parameterHelper,CancellationTokenSource timeoutTokenSource,CancellationTokenSource 
functionCancellationTokenSource,Boolean throwOnTimeout,TimeSpan timerInterval,IFunctionInstance instance) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 581\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithWatchersAsync(IFunctionInstanceEx instance,ParameterHelper parameterHelper,ILogger logger,CancellationTokenSource functionCancellationTokenSource) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 527\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 306\n End of inner exception\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 352\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.TryExecuteAsync(IFunctionInstance functionInstance,CancellationToken cancellationToken) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 108", + "exception_details": "Microsoft.Azure.WebJobs.Host.FunctionInvocationException : Exception while executing function: Functions.hello ---> 
Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException : Result: Failure\nException: Exception: Ka-booom!\nStack: File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\", line 479, in _handle__invocation_request\n call_result = await self._loop.run_in_executor(\n File \"/usr/local/lib/python3.9/concurrent/futures/thread.py\", line 58, in run\n result = self.fn(*self.args, **self.kwargs)\n File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\", line 752, in _run_sync_func\n return ExtensionManager.get_sync_invocation_wrapper(context,\n File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\", line 215, in _raw_invocation_wrapper\n result = function(**args)\n File \"/home/site/wwwroot/hello/__init__.py\", line 22, in main\n raise Exception(\"Ka-booom!\")\n\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Script.Description.WorkerFunctionInvoker.InvokeCore(Object[] parameters,FunctionInvocationContext context) at /src/azure-functions-host/src/WebJobs.Script/Description/Workers/WorkerFunctionInvoker.cs : 101\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionInvokerBase.Invoke(Object[] parameters) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionInvokerBase.cs : 82\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionGenerator.Coerce[T](Task`1 src) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionGenerator.cs : 225\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionInvoker`2.InvokeAsync[TReflected,TReturnValue](Object instance,Object[] arguments) at 
D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionInvoker.cs : 52\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.InvokeWithTimeoutAsync(IFunctionInvoker invoker,ParameterHelper parameterHelper,CancellationTokenSource timeoutTokenSource,CancellationTokenSource functionCancellationTokenSource,Boolean throwOnTimeout,TimeSpan timerInterval,IFunctionInstance instance) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 581\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithWatchersAsync(IFunctionInstanceEx instance,ParameterHelper parameterHelper,ILogger logger,CancellationTokenSource functionCancellationTokenSource) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 527\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 306\n End of inner exception\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 352\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\n at async 
Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.TryExecuteAsync(IFunctionInstance functionInstance,CancellationToken cancellationToken) at D:\\a\\_work\\1\\s\\src\\Microsoft.Azure.WebJobs.Host\\Executors\\FunctionExecutor.cs : 108", "exception_message": "Result: Failure\nException: Exception: Ka-booom!\nStack: File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\", line 479, in _handle__invocation_request\n call_result = await self._loop.run_in_executor(\n File \"/usr/local/lib/python3.9/concurrent/futures/thread.py\", line 58, in run\n result = self.fn(*self.args, **self.kwargs)\n File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\", line 752, in _run_sync_func\n return ExtensionManager.get_sync_invocation_wrapper(context,\n File \"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\", line 215, in _raw_invocation_wrapper\n result = function(**args)\n File \"/home/site/wwwroot/hello/__init__.py\", line 22, in main\n raise Exception(\"Ka-booom!\")\n", "exception_type": "Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException", "host_instance_id": "8699ec76-436a-43e2-a811-371c8e1472d7", @@ -41,7 +41,7 @@ "version": "8.8.0" }, "event": { - "original": "{\"time\":\"2023-06-07T11:33:11Z\",\"resourceId\":\"/SUBSCRIPTIONS/12CABCB4-86E8-404F-A3D2-1DC9982F45CA/RESOURCEGROUPS/TEST-RG/PROVIDERS/MICROSOFT.WEB/SITES/TEST-FUNCTION\",\"category\":\"FunctionAppLogs\",\"operationName\":\"Microsoft.Web/sites/functions/log\",\"level\":\"Error\",\"location\":\"East US\",\"properties\":{\"appName\":\"mbranca-test-function\",\"roleInstance\":\"54108609-638217294083255145\",\"message\":\"Executed Functions.hello (Failed, Id=3a4b2e78-0549-4ebc-ba4c-ee9a6bc9a04e, 
Duration=16ms)\",\"category\":\"Function.hello\",\"hostVersion\":\"4.21.3.3\",\"functionInvocationId\":\"3a4b2e78-0549-4ebc-ba4c-ee9a6bc9a04e\",\"functionName\":\"Functions.hello\",\"hostInstanceId\":\"8699ec76-436a-43e2-a811-371c8e1472d7\",\"level\":\"Error\",\"levelId\":4,\"processId\":64,\"exceptionDetails\":\"Microsoft.Azure.WebJobs.Host.FunctionInvocationException : Exception while executing function: Functions.hello ---\u003e Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException : Result: Failure\\nException: Exception: Ka-booom!\\nStack: File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 479, in _handle__invocation_request\\n call_result = await self._loop.run_in_executor(\\n File \\\"/usr/local/lib/python3.9/concurrent/futures/thread.py\\\", line 58, in run\\n result = self.fn(*self.args, **self.kwargs)\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 752, in _run_sync_func\\n return ExtensionManager.get_sync_invocation_wrapper(context,\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\\\", line 215, in _raw_invocation_wrapper\\n result = function(**args)\\n File \\\"/home/site/wwwroot/hello/__init__.py\\\", line 22, in main\\n raise Exception(\\\"Ka-booom!\\\")\\n\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Script.Description.WorkerFunctionInvoker.InvokeCore(Object[] parameters,FunctionInvocationContext context) at /src/azure-functions-host/src/WebJobs.Script/Description/Workers/WorkerFunctionInvoker.cs : 101\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionInvokerBase.Invoke(Object[] parameters) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionInvokerBase.cs : 82\\n at 
System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionGenerator.Coerce[T](Task`1 src) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionGenerator.cs : 225\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionInvoker`2.InvokeAsync[TReflected,TReturnValue](Object instance,Object[] arguments) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionInvoker.cs : 52\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.InvokeWithTimeoutAsync(IFunctionInvoker invoker,ParameterHelper parameterHelper,CancellationTokenSource timeoutTokenSource,CancellationTokenSource functionCancellationTokenSource,Boolean throwOnTimeout,TimeSpan timerInterval,IFunctionInstance instance) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 581\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithWatchersAsync(IFunctionInstanceEx instance,ParameterHelper parameterHelper,ILogger logger,CancellationTokenSource functionCancellationTokenSource) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 527\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 306\\n End of inner exception\\n at 
System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 352\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.TryExecuteAsync(IFunctionInstance functionInstance,CancellationToken cancellationToken) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 108\",\"exceptionMessage\":\"Result: Failure\\nException: Exception: Ka-booom!\\nStack: File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 479, in _handle__invocation_request\\n call_result = await self._loop.run_in_executor(\\n File \\\"/usr/local/lib/python3.9/concurrent/futures/thread.py\\\", line 58, in run\\n result = self.fn(*self.args, **self.kwargs)\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 752, in _run_sync_func\\n return ExtensionManager.get_sync_invocation_wrapper(context,\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\\\", line 215, in _raw_invocation_wrapper\\n result = function(**args)\\n File \\\"/home/site/wwwroot/hello/__init__.py\\\", line 22, in main\\n raise Exception(\\\"Ka-booom!\\\")\\n\",\"exceptionType\":\"Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException\",\"eventId\":3,\"eventName\":\"FunctionCompleted\"}}" + "original": 
"{\"time\":\"2023-06-07T11:33:11Z\",\"resourceId\":\"/SUBSCRIPTIONS/12CABCB4-86E8-404F-A3D2-1DC9982F45CA/RESOURCEGROUPS/TEST-RG/PROVIDERS/MICROSOFT.WEB/SITES/TEST-FUNCTION\",\"category\":\"FunctionAppLogs\",\"operationName\":\"Microsoft.Web/sites/functions/log\",\"level\":\"Error\",\"location\":\"East US\",\"properties\":{\"appName\":\"mbranca-test-function\",\"roleInstance\":\"54108609-638217294083255145\",\"message\":\"Executed Functions.hello (Failed, Id=3a4b2e78-0549-4ebc-ba4c-ee9a6bc9a04e, Duration=16ms)\",\"category\":\"Function.hello\",\"hostVersion\":\"4.21.3.3\",\"functionInvocationId\":\"3a4b2e78-0549-4ebc-ba4c-ee9a6bc9a04e\",\"functionName\":\"Functions.hello\",\"hostInstanceId\":\"8699ec76-436a-43e2-a811-371c8e1472d7\",\"level\":\"Error\",\"levelId\":4,\"processId\":64,\"exceptionDetails\":\"Microsoft.Azure.WebJobs.Host.FunctionInvocationException : Exception while executing function: Functions.hello ---> Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException : Result: Failure\\nException: Exception: Ka-booom!\\nStack: File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 479, in _handle__invocation_request\\n call_result = await self._loop.run_in_executor(\\n File \\\"/usr/local/lib/python3.9/concurrent/futures/thread.py\\\", line 58, in run\\n result = self.fn(*self.args, **self.kwargs)\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 752, in _run_sync_func\\n return ExtensionManager.get_sync_invocation_wrapper(context,\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\\\", line 215, in _raw_invocation_wrapper\\n result = function(**args)\\n File \\\"/home/site/wwwroot/hello/__init__.py\\\", line 22, in main\\n raise Exception(\\\"Ka-booom!\\\")\\n\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async 
Microsoft.Azure.WebJobs.Script.Description.WorkerFunctionInvoker.InvokeCore(Object[] parameters,FunctionInvocationContext context) at /src/azure-functions-host/src/WebJobs.Script/Description/Workers/WorkerFunctionInvoker.cs : 101\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionInvokerBase.Invoke(Object[] parameters) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionInvokerBase.cs : 82\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Script.Description.FunctionGenerator.Coerce[T](Task`1 src) at /src/azure-functions-host/src/WebJobs.Script/Description/FunctionGenerator.cs : 225\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionInvoker`2.InvokeAsync[TReflected,TReturnValue](Object instance,Object[] arguments) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionInvoker.cs : 52\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.InvokeWithTimeoutAsync(IFunctionInvoker invoker,ParameterHelper parameterHelper,CancellationTokenSource timeoutTokenSource,CancellationTokenSource functionCancellationTokenSource,Boolean throwOnTimeout,TimeSpan timerInterval,IFunctionInstance instance) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 581\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithWatchersAsync(IFunctionInstanceEx instance,ParameterHelper parameterHelper,ILogger logger,CancellationTokenSource functionCancellationTokenSource) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 527\\n at 
System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 306\\n End of inner exception\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.ExecuteWithLoggingAsync(IFunctionInstanceEx instance,FunctionStartedMessage message,FunctionInstanceLogEntry instanceLogEntry,ParameterHelper parameterHelper,ILogger logger,CancellationToken cancellationToken) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 352\\n at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw()\\n at async Microsoft.Azure.WebJobs.Host.Executors.FunctionExecutor.TryExecuteAsync(IFunctionInstance functionInstance,CancellationToken cancellationToken) at D:\\\\a\\\\_work\\\\1\\\\s\\\\src\\\\Microsoft.Azure.WebJobs.Host\\\\Executors\\\\FunctionExecutor.cs : 108\",\"exceptionMessage\":\"Result: Failure\\nException: Exception: Ka-booom!\\nStack: File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 479, in _handle__invocation_request\\n call_result = await self._loop.run_in_executor(\\n File \\\"/usr/local/lib/python3.9/concurrent/futures/thread.py\\\", line 58, in run\\n result = self.fn(*self.args, **self.kwargs)\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/dispatcher.py\\\", line 752, in _run_sync_func\\n return ExtensionManager.get_sync_invocation_wrapper(context,\\n File \\\"/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker/extension.py\\\", line 215, in 
_raw_invocation_wrapper\\n result = function(**args)\\n File \\\"/home/site/wwwroot/hello/__init__.py\\\", line 22, in main\\n raise Exception(\\\"Ka-booom!\\\")\\n\",\"exceptionType\":\"Microsoft.Azure.WebJobs.Script.Workers.Rpc.RpcException\",\"eventId\":3,\"eventName\":\"FunctionCompleted\"}}" }, "observer": { "product": "Azure Functions", diff --git a/packages/azure_functions/data_stream/metrics/sample_event.json b/packages/azure_functions/data_stream/metrics/sample_event.json index 9e4f76f8bb9..c5a4a372f5f 100644 --- a/packages/azure_functions/data_stream/metrics/sample_event.json +++ b/packages/azure_functions/data_stream/metrics/sample_event.json @@ -1,144 +1,144 @@ { - "agent": { - "name": "docker-fleet-agent", - "id": "ae16c4cf-2550-452a-860d-cef5e5182e94", - "type": "metricbeat", - "ephemeral_id": "7511408f-f109-4e34-a405-98ad479fc097", - "version": "8.7.1" + "agent": { + "name": "docker-fleet-agent", + "id": "ae16c4cf-2550-452a-860d-cef5e5182e94", + "type": "metricbeat", + "ephemeral_id": "7511408f-f109-4e34-a405-98ad479fc097", + "version": "8.7.1" + }, + "@timestamp": "2023-08-23T12:20:00.000Z", + "ecs": { + "version": "8.0.0" + }, + "data_stream": { + "namespace": "default", + "type": "metrics", + "dataset": "azure.function" + }, + "service": { + "type": "azure" + }, + "host": { + "hostname": "docker-fleet-agent", + "os": { + "kernel": "5.15.49-linuxkit", + "codename": "focal", + "name": "Ubuntu", + "family": "debian", + "type": "linux", + "version": "20.04.6 LTS (Focal Fossa)", + "platform": "ubuntu" }, - "@timestamp": "2023-08-23T12:20:00.000Z", - "ecs": { - "version": "8.0.0" - }, - "data_stream": { - "namespace": "default", - "type": "metrics", - "dataset": "azure.function" - }, - "service": { - "type": "azure" - }, - "host": { - "hostname": "docker-fleet-agent", - "os": { - "kernel": "5.15.49-linuxkit", - "codename": "focal", - "name": "Ubuntu", - "family": "debian", - "type": "linux", - "version": "20.04.6 LTS (Focal Fossa)", - "platform": 
"ubuntu" - }, - "containerized": false, - "ip": [ - "172.19.0.9" - ], - "name": "docker-fleet-agent", - "id": "fd2c4b0943e444508c12855a04d117c7", - "mac": [ - "02-42-AC-13-00-09" - ], - "architecture": "x86_64" - }, - "elastic_agent": { - "id": "ae16c4cf-2550-452a-860d-cef5e5182e94", - "version": "8.7.1", - "snapshot": false - }, - "metricset": { - "period": 300000, - "name": "monitor" + "containerized": false, + "ip": [ + "172.19.0.9" + ], + "name": "docker-fleet-agent", + "id": "fd2c4b0943e444508c12855a04d117c7", + "mac": [ + "02-42-AC-13-00-09" + ], + "architecture": "x86_64" + }, + "elastic_agent": { + "id": "ae16c4cf-2550-452a-860d-cef5e5182e94", + "version": "8.7.1", + "snapshot": false + }, + "metricset": { + "period": 300000, + "name": "monitor" + }, + "event": { + "duration": 42827917228, + "agent_id_status": "verified", + "ingested": "2023-08-23T12:25:34Z", + "module": "azure", + "dataset": "azure.function" + }, + "azure": { + "subscription_id": "12hjkls-78tyu-404f-a3d2-1dc9982f45ds", + "timegrain": "PT5M", + "functions": { + "handles": { + "avg": 0 + }, + "app_connections": { + "avg": 0 + }, + "total_app_domains": { + "avg": 0 + }, + "http_response_time": { + "avg": 0.02796875 + }, + "bytes_received": { + "total": 28804 + }, + "average_memory_working_set": { + "avg": 328533059.5 + }, + "requests": { + "total": 32 + }, + "bytes_sent": { + "total": 8192 + }, + "requests_inapplication_queue": { + "avg": 0 + }, + "memory_working_set": { + "avg": 328533059.5 + }, + "io_write_bytes_per_second": { + "total": 0 + }, + "io_other_bytes_per_second": { + "total": 0 + }, + "total_app_domains_unloaded": { + "avg": 0 + }, + "io_other_operations_per_second": { + "total": 0 + }, + "io_read_bytes_per_second": { + "total": 31879 + }, + "function_execution_units": { + "total": 0 + }, + "io_read_operations_per_second": { + "total": 0 + }, + "http2xx": { + "total": 16 + }, + "http3xx": { + "total": 0 + }, + "http4xx": { + "total": 0 + }, + "io_write_operations_per_second": { 
+ "total": 0 + }, + "function_execution_count": { + "total": 0 + }, + "http5xx": { + "total": 16 + } }, - "event": { - "duration": 42827917228, - "agent_id_status": "verified", - "ingested": "2023-08-23T12:25:34Z", - "module": "azure", - "dataset": "azure.function" + "resource": { + "name": "return-of-the-jedi", + "id": "/subscriptions/12hjkls-78tyu-404f-a3d2-1dc9982f45ds/resourceGroups/test-rg/providers/Microsoft.Web/sites/return-of-the-jedi", + "type": "Microsoft.Web/sites", + "group": "test-rg", + "tags": { + "hidden-link: /app-insights-resource-id": "/subscriptions/12hjkls-78tyu-404f-a3d2-1dc9982f45ds/resourceGroups/test-rg/providers/Microsoft.Insights/components/return-of-the-jedi" + } }, - "azure": { - "subscription_id": "12hjkls-78tyu-404f-a3d2-1dc9982f45ds", - "timegrain": "PT5M", - "functions": { - "handles": { - "avg": 0 - }, - "app_connections": { - "avg": 0 - }, - "total_app_domains": { - "avg": 0 - }, - "http_response_time": { - "avg": 0.02796875 - }, - "bytes_received": { - "total": 28804 - }, - "average_memory_working_set": { - "avg": 328533059.5 - }, - "requests": { - "total": 32 - }, - "bytes_sent": { - "total": 8192 - }, - "requests_inapplication_queue": { - "avg": 0 - }, - "memory_working_set": { - "avg": 328533059.5 - }, - "io_write_bytes_per_second": { - "total": 0 - }, - "io_other_bytes_per_second": { - "total": 0 - }, - "total_app_domains_unloaded": { - "avg": 0 - }, - "io_other_operations_per_second": { - "total": 0 - }, - "io_read_bytes_per_second": { - "total": 31879 - }, - "function_execution_units": { - "total": 0 - }, - "io_read_operations_per_second": { - "total": 0 - }, - "http2xx": { - "total": 16 - }, - "http3xx": { - "total": 0 - }, - "http4xx": { - "total": 0 - }, - "io_write_operations_per_second": { - "total": 0 - }, - "function_execution_count": { - "total": 0 - }, - "http5xx": { - "total": 16 - } - }, - "resource": { - "name": "return-of-the-jedi", - "id": 
"/subscriptions/12hjkls-78tyu-404f-a3d2-1dc9982f45ds/resourceGroups/test-rg/providers/Microsoft.Web/sites/return-of-the-jedi", - "type": "Microsoft.Web/sites", - "group": "test-rg", - "tags": { - "hidden-link: /app-insights-resource-id": "/subscriptions/12hjkls-78tyu-404f-a3d2-1dc9982f45ds/resourceGroups/test-rg/providers/Microsoft.Insights/components/return-of-the-jedi" - } - }, - "namespace": "Microsoft.Web/sites" + "namespace": "Microsoft.Web/sites" } } \ No newline at end of file diff --git a/packages/azure_functions/manifest.yml b/packages/azure_functions/manifest.yml index 6df9a58e63b..b094e9af604 100644 --- a/packages/azure_functions/manifest.yml +++ b/packages/azure_functions/manifest.yml @@ -1,7 +1,7 @@ -format_version: 2.5.1 +format_version: "3.0.0" name: azure_functions title: "Azure Functions" -version: 0.1.0 +version: "0.2.0" source: license: "Elastic-2.0" description: "Get metrics and logs from Azure Functions" @@ -10,8 +10,10 @@ categories: - azure - cloud conditions: - kibana.version: "^8.8.1" - elastic.subscription: "basic" + kibana: + version: "^8.8.1" + elastic: + subscription: "basic" vars: - name: resource_manager_endpoint type: text @@ -58,3 +60,4 @@ policy_templates: input_group: metrics owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/azure_functions/validation.yml b/packages/azure_functions/validation.yml new file mode 100644 index 00000000000..efdb1de132d --- /dev/null +++ b/packages/azure_functions/validation.yml @@ -0,0 +1,4 @@ +errors: + exclude_checks: + - SVR00004 + - SVR00002 diff --git a/packages/cassandra/_dev/deploy/variants.yml b/packages/cassandra/_dev/deploy/variants.yml index 03172eb76f3..ab8731a46cb 100644 --- a/packages/cassandra/_dev/deploy/variants.yml +++ b/packages/cassandra/_dev/deploy/variants.yml @@ -1,4 +1,4 @@ variants: - v3.11.11: + "v3.11.11": SERVICE_VERSION: 3.11.11 default: v3.11.11 diff --git a/packages/cassandra/changelog.yml b/packages/cassandra/changelog.yml index 
fd8995b14bc..b1fbe519dfd 100644 --- a/packages/cassandra/changelog.yml +++ b/packages/cassandra/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 1.10.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "1.9.2" changes: - description: Fix the type for `log.flags` field. diff --git a/packages/cassandra/data_stream/log/_dev/test/pipeline/test-cassandra.log-expected.json b/packages/cassandra/data_stream/log/_dev/test/pipeline/test-cassandra.log-expected.json index 349a4d074d1..52e9860ef34 100644 --- a/packages/cassandra/data_stream/log/_dev/test/pipeline/test-cassandra.log-expected.json +++ b/packages/cassandra/data_stream/log/_dev/test/pipeline/test-cassandra.log-expected.json @@ -9,7 +9,7 @@ "category": [ "database" ], - "ingested": "2023-01-19T09:30:52.993978173Z", + "ingested": "2023-10-11T21:02:06.332290589Z", "kind": "event", "module": "cassandra", "original": "INFO [main] 2021-07-21 12:18:15,910 YamlConfigurationLoader.java:92 - Configuration location: file:/C:/Users/kush.rana/Desktop/Projects/elasticconnectors/apache-cassandra-3.11.10/conf/cassandra.yaml", @@ -48,7 +48,7 @@ "category": [ "database" ], - "ingested": "2023-01-19T09:30:52.993987173Z", + "ingested": "2023-10-11T21:02:06.332297964Z", "kind": "event", "module": "cassandra", "original": "INFO [nioEventLoopGroup-2-1] 2021-07-21 12:23:32,856 Message.java:826 - Unexpected exception during request; channel = [id: 0xa6112238, L:/127.0.0.1:9042 - R:/127.0.0.1:60106]\njava.io.IOException: An existing connection was forcibly closed by the remote host\n\tat sun.nio.ch.SocketDispatcher.read0(Native Method) ~[na:1.8.0_291]\n\tat sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43) ~[na:1.8.0_291]\n\tat sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223) ~[na:1.8.0_291]\n\tat sun.nio.ch.IOUtil.read(IOUtil.java:192) ~[na:1.8.0_291]\n\tat 
sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:378) ~[na:1.8.0_291]\n\tat io.netty.buffer.PooledUnsafeDirectByteBuf.setBytes(PooledUnsafeDirectByteBuf.java:221) ~[netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:899) ~[netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:276) ~[netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:119) ~[netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:643) [netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:566) [netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:480) [netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:442) [netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131) [netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144) [netty-all-4.0.44.Final.jar:4.0.44.Final]\n\tat java.lang.Thread.run(Thread.java:748) [na:1.8.0_291]", @@ -87,7 +87,7 @@ "category": [ "database" ], - "ingested": "2023-01-19T09:30:52.993988382Z", + "ingested": "2023-10-11T21:02:06.332299172Z", "kind": "event", "module": "cassandra", "original": "INFO [main] 2021-07-22 15:16:10,134 StorageService.java:681 - Token metadata: Normal Tokens:\nlocalhost/127.0.0.1:[-9213409579976581007, -9208536521948527928, -9138148778360337250, -9127679418115980016, -8976989747173636951, -8928929930114662189, -8850922876008716693, -8742572573666294013, -8682790054622712291, -8602448569428641857, 
-8427444260690062087, -8322248594442534434, -8320001287302042084, -8254990830747485697, -8249957862499495790, -8189195602746080394, -8129887952616066480, -8056374640546524116, -8038761382995613210, -8037170110133462140, -7992011367541245422, -7880071995925335357, -7749009042440224717, -7617052974926155684, -7579992376331554614, -7547752740136860669, -7483309149196458963, -7403928085586378365, -7295126747625344073, -7285427200042709300, -7208686794243768519, -7117693125722831897, -6883722024573005241, -6720612224987504455, -6717012773506031630, -6634852508780100068, -6552838126854940220, -6342653427563558228, -6320996911175515573, -5938863381584705682, -5937138028307335156, -5863008324229999355, -5557902421394732795, -5507288642909724159, -5359164966940526093, -5270856020393654677, -5140336908990033565, -5086699786885043460, -5074000871253631366, -5073978841190866944, -5033423903877820246, -4943085258474057592, -4910543332786614489, -4879770268169959108, -4850099303163390815, -4807455567471793547, -4719159438210854274, -4619781027025280965, -4544597174348575652, -4485428936415138994, -4472739122485282685, -4373079504728124492, -4181301628202154089, -4126388626984258138, -4023053100851092991, -3922047779752166298, -3906718855618646650, -3868634824115722238, -3745879358169402556, -3690097830756775997, -3670927950016768037, -3616747246231509319, -3591426209432189634, -3490702446505255801, -3483385134981416558, -3429491430707856745, -3416053347785698066, -3413943292199611161, -3382417634862376426, -3349947830775906953, -3296542850568272055, -3176473778862080919, -3166346545442410180, -3132772882761113905, -3043642279006642678, -2954123492179139102, -2863171390983702241, -2654073953489285272, -2590940177673686644, -2587080257265847771, -2251605852203586301, -2198878564462361531, -2145896005604227928, -2124367674677225113, -2100796974152514459, -2100767007153930839, -2048246436708511951, -1940231583736341613, -1918276191425512270, -1901156852629539284, 
-1880561834882743663, -1746302695032098428, -1656872679451617615, -1548673391155746482, -1488479865253002047, -1379516031698548835, -1375032214470929604, -1314590380943603420, -1308375119031032813, -1285371162065374453, -1236769884793871491, -1079440989079766338, -869710792032024432, -789236985842188234, -455105751942515560, -444051516445821516, -361900680170942718, -168328735467397181, -134714950793282931, 60826037083339962, 84541622509618720, 144554169302448103, 196774206633592259, 198562324344386301, 238610642455775463, 278056991129882380, 279880939653579207, 282592745265744974, 410971596053594328, 495924051707529203, 526093410792433875, 575034598008591456, 579720708854555015, 589085295330075427, 590435486579344463, 776031051171600786, 781641065494177208, 846457757495601745, 850257452008846361, 900686269816010653, 1051126519181924424, 1203321605454295227, 1203895566636331975, 1211465458560787736, 1225960882717793263, 1241774140877981683, 1292414550010972366, 1334361932455738343, 1337377210995562847, 1419081732672037948, 1419381122950575881, 1490964106631451820, 1639326652578045878, 1705770160551872440, 1832285348848161986, 1847497393690487631, 1879480685217394570, 1891538878205257739, 1989643415414534081, 1994910556424959679, 1998064184769937808, 2155104014043103344, 2220336291482261989, 2283828190095512675, 2306824176141659302, 2451278539468942429, 2598912773538613706, 2762278292210005010, 2773437737237250294, 2832174370654635432, 2934442852597210012, 2945700216164622238, 2952556039929943093, 3107437902315470865, 3123397929954224804, 3186025726071011271, 3188208477416257432, 3200767480243736192, 3215706690514302654, 3276105622621268635, 3365884355288559483, 3401162000846683197, 3490150748579176060, 3504387869819626580, 3724159158070359927, 3885983949513817526, 3928206314063268933, 4115069486609707213, 4242350427466708713, 4386862705409556464, 4404914535424634841, 4486289098334426088, 4615905347775520925, 4650730553761950776, 4664032552573343869, 
4741710988150565521, 4824314289830954773, 4855150021956252527, 4979447180744678768, 5010835932690867774, 5135899429065919974, 5230763194691689473, 5318582515931199681, 5558115297723062617, 5588210318754588806, 5615741481489697481, 5681975707573416981, 5826288747232227118, 6135206485758434356, 6163357531758535338, 6192919223175738327, 6193503193865707083, 6251171521552312359, 6370634172051452277, 6440400015024985128, 6444714481840758288, 6619609681089611411, 6698206135058175326, 6723548174665139367, 6744705100400054244, 6777227783060561616, 6840248229536654533, 6903596833014731591, 6939254918732562615, 6986844194665101330, 6999783490815861150, 7081735174733692632, 7088499763485696293, 7164209991011322273, 7180706869985461496, 7369129159637618981, 7375016321335570664, 7518640636490654215, 7584590997557423209, 7665483325181367542, 7751011211020015083, 7808711095476824306, 7905270090323370693, 7930480210183871741, 8027805212938904497, 8086730665555500916, 8156173380772627797, 8455783018781361766, 8522975246241517657, 8525946248784902240, 8547978838832864285, 8580320166686867118, 8587722170086323264, 8726678858413102621, 8732783454936618116, 8734728266269135300, 8789489738850249699, 8799590254811299859, 8825243461020618263, 9047662432134874749, 9087960439561935209]\n", diff --git a/packages/cassandra/data_stream/log/_dev/test/pipeline/test-common-config.yml b/packages/cassandra/data_stream/log/_dev/test/pipeline/test-common-config.yml index 701d3977061..9dffc0e5904 100644 --- a/packages/cassandra/data_stream/log/_dev/test/pipeline/test-common-config.yml +++ b/packages/cassandra/data_stream/log/_dev/test/pipeline/test-common-config.yml @@ -1,5 +1,5 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" fields: tags: - preserve_original_event diff --git a/packages/cassandra/data_stream/metrics/elasticsearch/ingest_pipeline/default.yml b/packages/cassandra/data_stream/metrics/elasticsearch/ingest_pipeline/default.yml index 7470e526ebf..12af7fe3a58 100644 --- 
a/packages/cassandra/data_stream/metrics/elasticsearch/ingest_pipeline/default.yml +++ b/packages/cassandra/data_stream/metrics/elasticsearch/ingest_pipeline/default.yml @@ -18,12 +18,12 @@ processors: ignore_failure: true - set: field: event.category - value: database + value: [database] ignore_empty_value: true ignore_failure: true - set: field: event.type - value: info + value: [info] ignore_empty_value: true ignore_failure: true - set: diff --git a/packages/cassandra/data_stream/metrics/manifest.yml b/packages/cassandra/data_stream/metrics/manifest.yml index 6a989f92aaa..6711f53bcb8 100644 --- a/packages/cassandra/data_stream/metrics/manifest.yml +++ b/packages/cassandra/data_stream/metrics/manifest.yml @@ -14,4 +14,4 @@ streams: default: 10s template_path: "stream.yml.hbs" elasticsearch: - index_mode: "time_series" \ No newline at end of file + index_mode: "time_series" diff --git a/packages/cassandra/data_stream/metrics/sample_event.json b/packages/cassandra/data_stream/metrics/sample_event.json index 87c8d4877e1..8ab84497048 100644 --- a/packages/cassandra/data_stream/metrics/sample_event.json +++ b/packages/cassandra/data_stream/metrics/sample_event.json @@ -177,14 +177,18 @@ }, "event": { "agent_id_status": "verified", - "category": "database", + "category": [ + "database" + ], "created": "2022-08-02T07:46:20.906Z", "dataset": "cassandra.metrics", "duration": 13448617, "ingested": "2022-08-02T07:46:24Z", "kind": "event", "module": "cassandra", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", diff --git a/packages/cassandra/docs/README.md b/packages/cassandra/docs/README.md index d1a8168500e..f1e5571a97a 100644 --- a/packages/cassandra/docs/README.md +++ b/packages/cassandra/docs/README.md @@ -296,14 +296,18 @@ An example event for `metrics` looks as following: }, "event": { "agent_id_status": "verified", - "category": "database", + "category": [ + "database" + ], "created": "2022-08-02T07:46:20.906Z", "dataset": 
"cassandra.metrics", "duration": 13448617, "ingested": "2022-08-02T07:46:24Z", "kind": "event", "module": "cassandra", - "type": "info" + "type": [ + "info" + ] }, "host": { "architecture": "x86_64", diff --git a/packages/cassandra/manifest.yml b/packages/cassandra/manifest.yml index cfd86ef02de..34d4680c712 100644 --- a/packages/cassandra/manifest.yml +++ b/packages/cassandra/manifest.yml @@ -1,16 +1,17 @@ -format_version: 1.0.0 +format_version: "3.0.0" name: cassandra title: Cassandra -version: "1.9.2" -license: basic +version: "1.10.0" description: This Elastic integration collects logs and metrics from cassandra. type: integration categories: - datastore - observability -release: ga conditions: - kibana.version: "^8.8.0" + kibana: + version: "^8.8.0" + elastic: + subscription: basic screenshots: - src: /img/[Metrics Cassandra] Overview.png title: Overview Dashboard @@ -68,3 +69,4 @@ policy_templates: description: Collecting metrics from Cassandra using jolokia. owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/cassandra/validation.yml b/packages/cassandra/validation.yml new file mode 100644 index 00000000000..efdb1de132d --- /dev/null +++ b/packages/cassandra/validation.yml @@ -0,0 +1,4 @@ +errors: + exclude_checks: + - SVR00004 + - SVR00002 diff --git a/packages/ceph/changelog.yml b/packages/ceph/changelog.yml index a6e24b4b85c..759662b9226 100644 --- a/packages/ceph/changelog.yml +++ b/packages/ceph/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 1.1.0 + changes: + - description: Update the package format_version to 3.0.0. 
+ type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "1.0.1" changes: - description: Add null check and ignore_missing check to the rename processor diff --git a/packages/ceph/data_stream/cluster_status/fields/fields.yml b/packages/ceph/data_stream/cluster_status/fields/fields.yml index ce1a1209d47..bdb10bc0a92 100644 --- a/packages/ceph/data_stream/cluster_status/fields/fields.yml +++ b/packages/ceph/data_stream/cluster_status/fields/fields.yml @@ -73,8 +73,15 @@ description: Number of Placement Groups (pgs) in cluster. metric_type: gauge - name: state - type: object + type: group description: Placement Groups (pgs) state information. + fields: + - name: count + type: long + description: Total number of Placement Groups (pgs) in cluster. + - name: state_name + type: keyword + description: Represents the current status of individual Placement Groups (pgs). - name: total.bytes type: long description: Total bytes of the cluster. diff --git a/packages/ceph/docs/README.md b/packages/ceph/docs/README.md index c801cbc4632..666606ab0c2 100644 --- a/packages/ceph/docs/README.md +++ b/packages/ceph/docs/README.md @@ -528,7 +528,8 @@ An example event for `cluster_status` looks as following: | ceph.cluster_status.pg.degraded.ratio | Degraded objects ratio in Placement Groups (pgs). | double | | gauge | | ceph.cluster_status.pg.degraded.total.count | Total degraded Placement Groups (pgs). | long | | counter | | ceph.cluster_status.pg.remapped.count | Number of Placement Groups (pgs) in cluster. | long | | gauge | -| ceph.cluster_status.pg.state | Placement Groups (pgs) state information. | object | | | +| ceph.cluster_status.pg.state.count | Total number of Placement Groups (pgs) in cluster. | long | | | +| ceph.cluster_status.pg.state.state_name | Represents the current status of individual Placement Groups (pgs). | keyword | | | | ceph.cluster_status.pg.total.bytes | Total bytes of the cluster. 
| long | byte | gauge | | ceph.cluster_status.pg.used.bytes | Used bytes of the cluster. | long | byte | gauge | | ceph.cluster_status.pool.count | Number of pools in the cluster. | long | | gauge | diff --git a/packages/ceph/manifest.yml b/packages/ceph/manifest.yml index c637ca24a3a..44deb30c12d 100644 --- a/packages/ceph/manifest.yml +++ b/packages/ceph/manifest.yml @@ -1,15 +1,17 @@ -format_version: 2.0.0 +format_version: "3.0.0" name: ceph title: Ceph -version: "1.0.1" +version: "1.1.0" description: This Elastic integration collects metrics from Ceph instance. type: integration categories: - datastore - os_system conditions: - kibana.version: ^8.7.1 - elastic.subscription: basic + kibana: + version: ^8.7.1 + elastic: + subscription: basic screenshots: - src: /img/ceph-overview-dashboard-cluster-metrics.png title: Ceph cluster metrics overview @@ -96,3 +98,4 @@ policy_templates: # -----END CERTIFICATE----- owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/ceph/validation.yml b/packages/ceph/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/ceph/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002 diff --git a/packages/cockroachdb/changelog.yml b/packages/cockroachdb/changelog.yml index 85dbcf5e7bb..915f819db15 100644 --- a/packages/cockroachdb/changelog.yml +++ b/packages/cockroachdb/changelog.yml @@ -1,8 +1,13 @@ +- version: 1.7.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "1.6.0" changes: - - description: Enable time series data streams for the metrics datasets. This dramatically reduces storage for metrics and is expected to progressively improve query performance. For more details, see https://www.elastic.co/guide/en/elasticsearch/reference/current/tsds.html. 
- type: enhancement - link: https://github.com/elastic/integrations/pull/6774 + - description: Enable time series data streams for the metrics datasets. This dramatically reduces storage for metrics and is expected to progressively improve query performance. For more details, see https://www.elastic.co/guide/en/elasticsearch/reference/current/tsds.html. + type: enhancement + link: https://github.com/elastic/integrations/pull/6774 - version: "1.5.1" changes: - description: Revert metrics field definition to the format used before introducing metric_type. @@ -68,7 +73,6 @@ - description: Update to ECS 8.0 type: enhancement link: https://github.com/elastic/integrations/pull/2484 -# newer versions go on top - version: "0.2.0" changes: - description: Support Kibana 8.0 diff --git a/packages/cockroachdb/data_stream/status/fields/agent.yml b/packages/cockroachdb/data_stream/status/fields/agent.yml index d00130ad600..4ee28c0d22f 100644 --- a/packages/cockroachdb/data_stream/status/fields/agent.yml +++ b/packages/cockroachdb/data_stream/status/fields/agent.yml @@ -57,33 +57,6 @@ - name: image.id type: keyword description: Image ID for the cloud instance. -- name: container - title: Container - group: 2 - description: "Container fields are used for meta information about the specific container that is the source of information.\nThese fields help correlate data based containers from any runtime." - type: group - fields: - - name: id - level: core - type: keyword - ignore_above: 1024 - description: Unique container id. - dimension: true - - name: image.name - level: extended - type: keyword - ignore_above: 1024 - description: Name of the image the container was built on. - - name: labels - level: extended - type: object - object_type: keyword - description: Image labels. - - name: name - level: extended - type: keyword - ignore_above: 1024 - description: Container name. 
- name: host title: Host group: 2 diff --git a/packages/cockroachdb/data_stream/status/fields/ecs.yml b/packages/cockroachdb/data_stream/status/fields/ecs.yml index 9175f0f0c13..280c4a8008b 100644 --- a/packages/cockroachdb/data_stream/status/fields/ecs.yml +++ b/packages/cockroachdb/data_stream/status/fields/ecs.yml @@ -14,5 +14,7 @@ external: ecs - name: container.image.name external: ecs +- name: container.labels + external: ecs - name: host external: ecs diff --git a/packages/cockroachdb/data_stream/status/fields/fields.yml b/packages/cockroachdb/data_stream/status/fields/fields.yml index 01dbc50043e..3cd1056d39f 100644 --- a/packages/cockroachdb/data_stream/status/fields/fields.yml +++ b/packages/cockroachdb/data_stream/status/fields/fields.yml @@ -28,7 +28,6 @@ object_type_mapping_type: "*" description: >- Prometheus histogram metric - - name: cockroachdb.status type: group fields: diff --git a/packages/cockroachdb/data_stream/status/manifest.yml b/packages/cockroachdb/data_stream/status/manifest.yml index b9fcb107d4b..eaae20c3e71 100644 --- a/packages/cockroachdb/data_stream/status/manifest.yml +++ b/packages/cockroachdb/data_stream/status/manifest.yml @@ -58,9 +58,11 @@ streams: show_user: true title: Status description: Collect CockroachDB status metrics - elasticsearch: index_mode: "time_series" index_template: settings: - index.mapping.dimension_fields.limit: 32 \ No newline at end of file + index: + mapping: + dimension_fields: + limit: 32 diff --git a/packages/cockroachdb/manifest.yml b/packages/cockroachdb/manifest.yml index 821ad73084d..213f31683ea 100644 --- a/packages/cockroachdb/manifest.yml +++ b/packages/cockroachdb/manifest.yml @@ -1,7 +1,6 @@ name: cockroachdb title: CockroachDB Metrics -version: "1.6.0" -release: ga +version: "1.7.0" description: Collect metrics from CockroachDB servers with Elastic Agent. 
type: integration icons: @@ -14,13 +13,15 @@ screenshots: title: CockroachDB metrics overview size: 5120x2562 type: image/png -format_version: 1.0.0 -license: basic +format_version: "3.0.0" categories: - observability - datastore conditions: - kibana.version: "^8.9.0" + kibana: + version: "^8.9.0" + elastic: + subscription: basic vars: - name: hosts type: text @@ -53,3 +54,4 @@ policy_templates: type: image/png owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/cockroachdb/validation.yml b/packages/cockroachdb/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/cockroachdb/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002 diff --git a/packages/coredns/changelog.yml b/packages/coredns/changelog.yml index a36543093d4..cfb69dd2ef8 100644 --- a/packages/coredns/changelog.yml +++ b/packages/coredns/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 0.6.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "0.5.0" changes: - description: Adapt fields for changes in file system info diff --git a/packages/coredns/data_stream/log/_dev/test/pipeline/test-coredns-error.log-expected.json b/packages/coredns/data_stream/log/_dev/test/pipeline/test-coredns-error.log-expected.json index 02898d84fb9..fa7ae22e1ed 100644 --- a/packages/coredns/data_stream/log/_dev/test/pipeline/test-coredns-error.log-expected.json +++ b/packages/coredns/data_stream/log/_dev/test/pipeline/test-coredns-error.log-expected.json @@ -100,7 +100,7 @@ "coredns": { "log": { "error": { - "message": "read tcp 10.100.8.34:39436-\u003e1.0.0.1:853: i/o timeout" + "message": "read tcp 10.100.8.34:39436->1.0.0.1:853: i/o timeout" } } }, @@ -122,7 +122,7 @@ "network" ], "kind": "event", - "original": "[ERROR] plugin/errors: 2 ims-prod07.adobelogin.com. 
HTTPS: read tcp 10.100.8.34:39436-\u003e1.0.0.1:853: i/o timeout", + "original": "[ERROR] plugin/errors: 2 ims-prod07.adobelogin.com. HTTPS: read tcp 10.100.8.34:39436->1.0.0.1:853: i/o timeout", "outcome": "failure", "type": [ "protocol" @@ -148,7 +148,7 @@ "coredns": { "log": { "error": { - "message": "read udp 10.100.0.1:39548-\u003e10.100.0.1:8600: i/o timeout" + "message": "read udp 10.100.0.1:39548->10.100.0.1:8600: i/o timeout" } } }, @@ -167,7 +167,7 @@ "network" ], "kind": "event", - "original": "[ERROR] plugin/errors: 2 active.vault.service.va.consul. AAAA: read udp 10.100.0.1:39548-\u003e10.100.0.1:8600: i/o timeout", + "original": "[ERROR] plugin/errors: 2 active.vault.service.va.consul. AAAA: read udp 10.100.0.1:39548->10.100.0.1:8600: i/o timeout", "outcome": "failure", "type": [ "protocol" diff --git a/packages/coredns/manifest.yml b/packages/coredns/manifest.yml index d95313a5130..99eb5565419 100644 --- a/packages/coredns/manifest.yml +++ b/packages/coredns/manifest.yml @@ -1,14 +1,16 @@ -format_version: 2.0.0 +format_version: "3.0.0" name: coredns title: "CoreDNS" -version: "0.5.0" +version: "0.6.0" description: "Collect logs from CoreDNS instances with Elastic Agent." 
type: integration categories: - observability conditions: - kibana.version: "^8.0.0" - elastic.subscription: "basic" + kibana: + version: "^8.0.0" + elastic: + subscription: "basic" screenshots: - src: /img/coredns-overview.png title: CoreDNS Overview dashboard @@ -32,3 +34,4 @@ policy_templates: description: "Collect logs from CoreDNS instances (input: journald)" owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/coredns/validation.yml b/packages/coredns/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/coredns/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002 diff --git a/packages/couchbase/_dev/deploy/variants.yml b/packages/couchbase/_dev/deploy/variants.yml index 440984af56c..3565316b322 100644 --- a/packages/couchbase/_dev/deploy/variants.yml +++ b/packages/couchbase/_dev/deploy/variants.yml @@ -1,4 +1,4 @@ variants: - v7.1.0: + "v7.1.0": COUCHBASE_VERSION: 7.1.0 default: v7.1.0 diff --git a/packages/couchbase/changelog.yml b/packages/couchbase/changelog.yml index ec1bce36784..901aadf820c 100644 --- a/packages/couchbase/changelog.yml +++ b/packages/couchbase/changelog.yml @@ -1,3 +1,8 @@ +- version: 1.4.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "1.3.1" changes: - description: Remove forwarded tag from metrics data streams. 
diff --git a/packages/couchbase/data_stream/bucket/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/bucket/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/bucket/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/bucket/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/cache/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/cache/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/cache/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/cache/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/cbl_replication/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/cbl_replication/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/cbl_replication/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/cbl_replication/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/cluster/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/cluster/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/cluster/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/cluster/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/database_stats/_dev/test/pipeline/test-common-config.yml 
b/packages/couchbase/data_stream/database_stats/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/database_stats/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/database_stats/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/miscellaneous/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/miscellaneous/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/miscellaneous/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/miscellaneous/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/node/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/node/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/node/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/node/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/query_index/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/query_index/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/query_index/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/query_index/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/query_index/fields/fields.yml b/packages/couchbase/data_stream/query_index/fields/fields.yml index ffec08afb1a..cc7dab14a6d 
100644 --- a/packages/couchbase/data_stream/query_index/fields/fields.yml +++ b/packages/couchbase/data_stream/query_index/fields/fields.yml @@ -3,32 +3,32 @@ fields: - name: eventing.failed.count type: float - metrics_type: gauge + metric_type: gauge description: Total number of failed eventing function operations. - name: ram type: group fields: - name: pct type: float - metrics_type: gauge + metric_type: gauge description: The percentage of index entries in ram. - name: remaining type: float - metrics_type: gauge + metric_type: gauge description: The amount of memory remaining. - name: query type: group fields: - name: requests type: float - metrics_type: gauge + metric_type: gauge description: Current number of requests per second. - name: request_time.avg type: float - metrics_type: gauge + metric_type: gauge unit: s description: Average total request time. - name: result.count type: float - metrics_type: gauge + metric_type: gauge description: Number of results returned. diff --git a/packages/couchbase/data_stream/resource/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/resource/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/resource/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/resource/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/data_stream/xdcr/_dev/test/pipeline/test-common-config.yml b/packages/couchbase/data_stream/xdcr/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchbase/data_stream/xdcr/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchbase/data_stream/xdcr/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchbase/docs/README.md 
b/packages/couchbase/docs/README.md index 34247b476a7..878dbc1805d 100644 --- a/packages/couchbase/docs/README.md +++ b/packages/couchbase/docs/README.md @@ -1524,38 +1524,38 @@ An example event for `query_index` looks as following: **Exported fields** -| Field | Description | Type | Unit | -|---|---|---|---| -| @timestamp | Event timestamp. | date | | -| agent.id | Unique identifier of this agent (if one exists). Example: For Beats this would be beat.id. | keyword | | -| cloud.account.id | The cloud account or organization id used to identify different entities in a multi-tenant environment. Examples: AWS account id, Google Cloud ORG Id, or other unique identifier. | keyword | | -| cloud.availability_zone | Availability zone in which this host, resource, or service is located. | keyword | | -| cloud.instance.id | Instance ID of the host machine. | keyword | | -| cloud.provider | Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean. | keyword | | -| cloud.region | Region in which this host, resource, or service is located. | keyword | | -| container.id | Unique container id. | keyword | | -| couchbase.query_index.eventing.failed.count | Total number of failed eventing function operations. | float | | -| couchbase.query_index.query.request_time.avg | Average total request time. | float | s | -| couchbase.query_index.query.requests | Current number of requests per second. | float | | -| couchbase.query_index.query.result.count | Number of results returned. | float | | -| couchbase.query_index.ram.pct | The percentage of index entries in ram. | float | | -| couchbase.query_index.ram.remaining | The amount of memory remaining. | float | | -| data_stream.dataset | Data stream dataset. | constant_keyword | | -| data_stream.namespace | Data stream namespace. | constant_keyword | | -| data_stream.type | Data stream type. | constant_keyword | | -| ecs.version | ECS version this event conforms to. 
`ecs.version` is a required field and must exist in all events. When querying across multiple indices -- which may conform to slightly different ECS versions -- this field lets integrations adjust to the schema version of the events. | keyword | | -| error.message | Error message. | match_only_text | | -| event.category | This is one of four ECS Categorization Fields, and indicates the second level in the ECS category hierarchy. `event.category` represents the "big buckets" of ECS categories. For example, filtering on `event.category:process` yields all events relating to process activity. This field is closely related to `event.type`, which is used as a subcategory. This field is an array. This will allow proper categorization of some events that fall in multiple categories. | keyword | | -| event.dataset | Name of the dataset. If an event source publishes more than one type of log or events (e.g. access log, error log), the dataset is used to specify which one the event comes from. It's recommended but not required to start the dataset name with the module name, followed by a dot, then the dataset name. | keyword | | -| event.duration | Duration of the event in nanoseconds. If event.start and event.end are known this value should be the difference between the end and start time. | long | | -| event.ingested | Timestamp when an event arrived in the central data store. This is different from `@timestamp`, which is when the event originally occurred. It's also different from `event.created`, which is meant to capture the first time an agent saw the event. In normal conditions, assuming no tampering, the timestamps should chronologically look like this: `@timestamp` \< `event.created` \< `event.ingested`. | date | | -| event.kind | This is one of four ECS Categorization Fields, and indicates the highest level in the ECS category hierarchy. 
`event.kind` gives high-level information about what type of information the event contains, without being specific to the contents of the event. For example, values of this field distinguish alert events from metric events. The value of this field can be used to inform how these kinds of events should be handled. They may warrant different retention, different access control, it may also help understand whether the data coming in at a regular interval or not. | keyword | | -| event.module | Name of the module this data is coming from. If your monitoring agent supports the concept of modules or plugins to process events of a given source (e.g. Apache logs), `event.module` should contain the name of this module. | keyword | | -| event.type | This is one of four ECS Categorization Fields, and indicates the third level in the ECS category hierarchy. `event.type` represents a categorization "sub-bucket" that, when used along with the `event.category` field values, enables filtering events down to a level appropriate for single visualization. This field is an array. This will allow proper categorization of some events that fall in multiple event types. | keyword | | -| host.name | Name of the host. It can contain what `hostname` returns on Unix systems, the fully qualified domain name, or a name specified by the user. The sender decides which value to use. | keyword | | -| service.address | Address where data about this service was collected from. This should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets). | keyword | | -| service.type | The type of the service data is collected from. The type can be used to group and correlate logs and metrics from one service type. Example: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`. | keyword | | -| tags | List of keywords used to tag each event. 
| keyword | | +| Field | Description | Type | Unit | Metric Type | +|---|---|---|---|---| +| @timestamp | Event timestamp. | date | | | +| agent.id | Unique identifier of this agent (if one exists). Example: For Beats this would be beat.id. | keyword | | | +| cloud.account.id | The cloud account or organization id used to identify different entities in a multi-tenant environment. Examples: AWS account id, Google Cloud ORG Id, or other unique identifier. | keyword | | | +| cloud.availability_zone | Availability zone in which this host, resource, or service is located. | keyword | | | +| cloud.instance.id | Instance ID of the host machine. | keyword | | | +| cloud.provider | Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean. | keyword | | | +| cloud.region | Region in which this host, resource, or service is located. | keyword | | | +| container.id | Unique container id. | keyword | | | +| couchbase.query_index.eventing.failed.count | Total number of failed eventing function operations. | float | | gauge | +| couchbase.query_index.query.request_time.avg | Average total request time. | float | s | gauge | +| couchbase.query_index.query.requests | Current number of requests per second. | float | | gauge | +| couchbase.query_index.query.result.count | Number of results returned. | float | | gauge | +| couchbase.query_index.ram.pct | The percentage of index entries in ram. | float | | gauge | +| couchbase.query_index.ram.remaining | The amount of memory remaining. | float | | gauge | +| data_stream.dataset | Data stream dataset. | constant_keyword | | | +| data_stream.namespace | Data stream namespace. | constant_keyword | | | +| data_stream.type | Data stream type. | constant_keyword | | | +| ecs.version | ECS version this event conforms to. `ecs.version` is a required field and must exist in all events. 
When querying across multiple indices -- which may conform to slightly different ECS versions -- this field lets integrations adjust to the schema version of the events. | keyword | | | +| error.message | Error message. | match_only_text | | | +| event.category | This is one of four ECS Categorization Fields, and indicates the second level in the ECS category hierarchy. `event.category` represents the "big buckets" of ECS categories. For example, filtering on `event.category:process` yields all events relating to process activity. This field is closely related to `event.type`, which is used as a subcategory. This field is an array. This will allow proper categorization of some events that fall in multiple categories. | keyword | | | +| event.dataset | Name of the dataset. If an event source publishes more than one type of log or events (e.g. access log, error log), the dataset is used to specify which one the event comes from. It's recommended but not required to start the dataset name with the module name, followed by a dot, then the dataset name. | keyword | | | +| event.duration | Duration of the event in nanoseconds. If event.start and event.end are known this value should be the difference between the end and start time. | long | | | +| event.ingested | Timestamp when an event arrived in the central data store. This is different from `@timestamp`, which is when the event originally occurred. It's also different from `event.created`, which is meant to capture the first time an agent saw the event. In normal conditions, assuming no tampering, the timestamps should chronologically look like this: `@timestamp` \< `event.created` \< `event.ingested`. | date | | | +| event.kind | This is one of four ECS Categorization Fields, and indicates the highest level in the ECS category hierarchy. `event.kind` gives high-level information about what type of information the event contains, without being specific to the contents of the event. 
For example, values of this field distinguish alert events from metric events. The value of this field can be used to inform how these kinds of events should be handled. They may warrant different retention, different access control, it may also help understand whether the data coming in at a regular interval or not. | keyword | | | +| event.module | Name of the module this data is coming from. If your monitoring agent supports the concept of modules or plugins to process events of a given source (e.g. Apache logs), `event.module` should contain the name of this module. | keyword | | | +| event.type | This is one of four ECS Categorization Fields, and indicates the third level in the ECS category hierarchy. `event.type` represents a categorization "sub-bucket" that, when used along with the `event.category` field values, enables filtering events down to a level appropriate for single visualization. This field is an array. This will allow proper categorization of some events that fall in multiple event types. | keyword | | | +| host.name | Name of the host. It can contain what `hostname` returns on Unix systems, the fully qualified domain name, or a name specified by the user. The sender decides which value to use. | keyword | | | +| service.address | Address where data about this service was collected from. This should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets). | keyword | | | +| service.type | The type of the service data is collected from. The type can be used to group and correlate logs and metrics from one service type. Example: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`. | keyword | | | +| tags | List of keywords used to tag each event. 
| keyword | | | ### XDCR diff --git a/packages/couchbase/manifest.yml b/packages/couchbase/manifest.yml index 0205060354e..5fe8d3220c8 100644 --- a/packages/couchbase/manifest.yml +++ b/packages/couchbase/manifest.yml @@ -1,15 +1,17 @@ -format_version: 1.0.0 +format_version: "3.0.0" name: couchbase title: Couchbase -version: "1.3.1" -license: basic +version: "1.4.0" description: Collect metrics from Couchbase databases with Elastic Agent. type: integration categories: - datastore - observability conditions: - kibana.version: ^8.8.0 + kibana: + version: ^8.8.0 + elastic: + subscription: basic screenshots: - src: /img/couchbase-metrics-overview.png title: Couchbase metrics Overview dashboard @@ -136,3 +138,4 @@ policy_templates: show_user: false owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/couchbase/validation.yml b/packages/couchbase/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/couchbase/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002 diff --git a/packages/couchdb/_dev/deploy/docker/variants.yml b/packages/couchdb/_dev/deploy/docker/variants.yml index fa7eb76afc0..ad95e69275b 100644 --- a/packages/couchdb/_dev/deploy/docker/variants.yml +++ b/packages/couchdb/_dev/deploy/docker/variants.yml @@ -1,4 +1,4 @@ variants: - v3.2.2: + "v3.2.2": SERVICE_VERSION: 3.2.2 default: v3.2.2 diff --git a/packages/couchdb/changelog.yml b/packages/couchdb/changelog.yml index 74bca91e92e..4d06d4b1e91 100644 --- a/packages/couchdb/changelog.yml +++ b/packages/couchdb/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 1.1.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "1.0.1" changes: - description: Remove forwarded tag from metrics data stream. 
diff --git a/packages/couchdb/data_stream/server/_dev/test/pipeline/test-common-config.yml b/packages/couchdb/data_stream/server/_dev/test/pipeline/test-common-config.yml index c39dc386179..e071d397ddf 100644 --- a/packages/couchdb/data_stream/server/_dev/test/pipeline/test-common-config.yml +++ b/packages/couchdb/data_stream/server/_dev/test/pipeline/test-common-config.yml @@ -1,2 +1,2 @@ dynamic_fields: - event.ingested: ".*" + "event.ingested": ".*" diff --git a/packages/couchdb/data_stream/server/_dev/test/pipeline/test-server-metrics.json-expected.json b/packages/couchdb/data_stream/server/_dev/test/pipeline/test-server-metrics.json-expected.json index 4f31dfdd341..b096bd2475a 100644 --- a/packages/couchdb/data_stream/server/_dev/test/pipeline/test-server-metrics.json-expected.json +++ b/packages/couchdb/data_stream/server/_dev/test/pipeline/test-server-metrics.json-expected.json @@ -59,7 +59,7 @@ "category": [ "database" ], - "ingested": "2023-01-19T05:22:44.510498500Z", + "ingested": "2023-10-11T21:19:18.629229884Z", "kind": "metric", "module": "couchdb", "type": [ diff --git a/packages/couchdb/manifest.yml b/packages/couchdb/manifest.yml index e17f8874e76..b646048c0e2 100644 --- a/packages/couchdb/manifest.yml +++ b/packages/couchdb/manifest.yml @@ -1,15 +1,17 @@ -format_version: 1.0.0 +format_version: "3.0.0" name: couchdb title: CouchDB -version: "1.0.1" -license: basic +version: "1.1.0" description: Collect metrics from CouchDB with Elastic Agent. 
type: integration categories: - datastore - observability conditions: - kibana.version: ^8.8.0 + kibana: + version: ^8.8.0 + elastic: + subscription: basic screenshots: - src: /img/metricbeat-couchdb-overview.png title: Metricbeat CouchDB Overview @@ -69,3 +71,4 @@ policy_templates: show_user: false owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/couchdb/validation.yml b/packages/couchdb/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/couchdb/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002 diff --git a/packages/etcd/changelog.yml b/packages/etcd/changelog.yml index 1a7ca38a69c..e969a9353ce 100644 --- a/packages/etcd/changelog.yml +++ b/packages/etcd/changelog.yml @@ -1,4 +1,9 @@ # newer versions go on top +- version: 0.6.0 + changes: + - description: Update the package format_version to 3.0.0. + type: enhancement + link: https://github.com/elastic/integrations/pull/8170 - version: "0.5.0" changes: - description: Rename ownership from obs-service-integrations to obs-infraobs-integrations diff --git a/packages/etcd/data_stream/metrics/fields/fields.yml b/packages/etcd/data_stream/metrics/fields/fields.yml index 65603671705..8eb5d4a0a27 100755 --- a/packages/etcd/data_stream/metrics/fields/fields.yml +++ b/packages/etcd/data_stream/metrics/fields/fields.yml @@ -9,7 +9,7 @@ description: | Size of stored data at MVCC - name: wal_fsync_duration.ns.bucket.* - type: object + type: long description: | Latency for writing ahead logs to disk - name: wal_fsync_duration.ns.count @@ -21,7 +21,7 @@ description: | Write ahead logs latency sum - name: backend_commit_duration.ns.bucket.* - type: object + type: long description: | Latency for writing backend changes to disk - name: backend_commit_duration.ns.count diff --git a/packages/etcd/docs/README.md b/packages/etcd/docs/README.md index 06213b43921..848c55511a0 100644 --- a/packages/etcd/docs/README.md +++ 
b/packages/etcd/docs/README.md @@ -180,11 +180,11 @@ An example event for `metrics` looks as following: | data_stream.type | Data stream type. | constant_keyword | | ecs.version | ECS version this event conforms to. `ecs.version` is a required field and must exist in all events. When querying across multiple indices -- which may conform to slightly different ECS versions -- this field lets integrations adjust to the schema version of the events. | keyword | | etcd.api_version | Etcd API version for metrics retrieval | keyword | -| etcd.disk.backend_commit_duration.ns.bucket.\* | Latency for writing backend changes to disk | object | +| etcd.disk.backend_commit_duration.ns.bucket.\* | Latency for writing backend changes to disk | long | | etcd.disk.backend_commit_duration.ns.count | Backend commits count | long | | etcd.disk.backend_commit_duration.ns.sum | Backend commits latency sum | long | | etcd.disk.mvcc_db_total_size.bytes | Size of stored data at MVCC | long | -| etcd.disk.wal_fsync_duration.ns.bucket.\* | Latency for writing ahead logs to disk | object | +| etcd.disk.wal_fsync_duration.ns.bucket.\* | Latency for writing ahead logs to disk | long | | etcd.disk.wal_fsync_duration.ns.count | Write ahead logs count | long | | etcd.disk.wal_fsync_duration.ns.sum | Write ahead logs latency sum | long | | etcd.memory.go_memstats_alloc.bytes | Memory allocated bytes as of MemStats Go | long | diff --git a/packages/etcd/manifest.yml b/packages/etcd/manifest.yml index 43ebeb372d0..16ac219ed14 100644 --- a/packages/etcd/manifest.yml +++ b/packages/etcd/manifest.yml @@ -1,16 +1,17 @@ -format_version: 1.0.0 +format_version: "3.0.0" name: etcd title: etcd -version: "0.5.0" -license: basic +version: "0.6.0" description: Collect metrics from etcd servers with Elastic Agent. 
type: integration categories: - datastore - observability -release: experimental conditions: - kibana.version: "^8.3.0" + kibana: + version: "^8.3.0" + elastic: + subscription: basic icons: - src: /img/etcd.svg title: etcd logo @@ -40,3 +41,4 @@ policy_templates: description: Collecting etcd metrics owner: github: elastic/obs-infraobs-integrations + type: elastic diff --git a/packages/etcd/validation.yml b/packages/etcd/validation.yml new file mode 100644 index 00000000000..bcc8f74ac3a --- /dev/null +++ b/packages/etcd/validation.yml @@ -0,0 +1,3 @@ +errors: + exclude_checks: + - SVR00002