diff --git a/CHANGELOG.md b/CHANGELOG.md index 63e644c68..c409bc41f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Change Log +## [1.1.1](https://github.com/CERT-BDF/Cortex/tree/1.1.1) (2017-05-17) + +[Full Changelog](https://github.com/CERT-BDF/Cortex/compare/1.1.0...1.1.1) + +**Implemented enhancements:** + +- Missing logos and favicons [\#25](https://github.com/CERT-BDF/Cortex/issues/25) +- MISP integration feature request [\#21](https://github.com/CERT-BDF/Cortex/issues/21) + ## [1.1.0](https://github.com/CERT-BDF/Cortex/tree/1.1.0) (2017-05-12) [Full Changelog](https://github.com/CERT-BDF/Cortex/compare/1.0.2...1.1.0) @@ -8,7 +17,6 @@ - Add support to .deb and .rpm package generation [\#20](https://github.com/CERT-BDF/Cortex/issues/20) - Display analyzers metadata [\#18](https://github.com/CERT-BDF/Cortex/issues/18) -- MISP integration feature request [\#21](https://github.com/CERT-BDF/Cortex/issues/21) **Closed issues:** diff --git a/app/models/JsonFormat.scala b/app/models/JsonFormat.scala index 561544d3b..4c08752ff 100644 --- a/app/models/JsonFormat.scala +++ b/app/models/JsonFormat.scala @@ -18,7 +18,8 @@ object JsonFormat { implicit val fileArtifactWrites: OWrites[FileArtifact] = OWrites[FileArtifact](fileArtifact ⇒ Json.obj( "attributes" → fileArtifact.attributes)) - implicit val dataArtifactWrites: OWrites[DataArtifact] = Json.writes[DataArtifact] + implicit val dataArtifactWrites: OWrites[DataArtifact] = OWrites[DataArtifact](artifact ⇒ + artifact.attributes + ("data" → JsString(artifact.data))) implicit val dataActifactReads: Reads[DataArtifact] = Json.reads[DataArtifact] val artifactWrites: OWrites[Artifact] = OWrites[Artifact] { diff --git a/app/models/MispModule.scala b/app/models/MispModule.scala index c5c08f065..3d01ed72c 100644 --- a/app/models/MispModule.scala +++ b/app/models/MispModule.scala @@ -1,5 +1,7 @@ package models +import play.api.libs.json.JsObject + case class MispModule( name: String, version: String, @@ -7,7 +9,7 
@@ case class MispModule( author: String, dataTypeList: Seq[String], inputAttributes: Seq[String], - config: Seq[String], + config: JsObject, loaderCommand: String) extends Analyzer { val license = "AGPL-3.0" val url = "https://github.com/MISP/misp-modules" diff --git a/app/services/MispSrv.scala b/app/services/MispSrv.scala index c75ee7c23..ff232bb35 100644 --- a/app/services/MispSrv.scala +++ b/app/services/MispSrv.scala @@ -1,12 +1,13 @@ package services import java.io.{ ByteArrayInputStream, FileInputStream, InputStream, SequenceInputStream } -import javax.inject.Inject +import javax.inject.{ Inject, Singleton } import akka.actor.ActorSystem import models.JsonFormat._ import models._ import org.apache.commons.codec.binary.{ Base64, Base64InputStream } +import util.JsonConfig import play.api.libs.json.{ Json, _ } import play.api.{ Configuration, Logger } @@ -15,8 +16,11 @@ import scala.concurrent.{ ExecutionContext, Future } import scala.sys.process._ import scala.util.{ Failure, Success, Try } +@Singleton class MispSrv( - loaderCommandOption: Option[String], + mispModulesEnabled: Boolean, + loaderCommand: String, + mispModuleConfig: JsObject, externalAnalyzerSrv: ExternalAnalyzerSrv, jobSrv: JobSrv, akkaSystem: ActorSystem) { @@ -26,7 +30,9 @@ class MispSrv( externalAnalyzerSrv: ExternalAnalyzerSrv, jobSrv: JobSrv, akkaSystem: ActorSystem) = this( - configuration.getString("misp.modules.loader"), + configuration.getBoolean("misp.modules.enabled").getOrElse(false), + configuration.getString("misp.modules.loader").get, + JsonConfig.configWrites.writes(configuration.getConfig("misp.modules.config").getOrElse(Configuration.empty)), externalAnalyzerSrv, jobSrv, akkaSystem) @@ -34,25 +40,27 @@ class MispSrv( private[MispSrv] lazy val logger = Logger(getClass) private[MispSrv] lazy val analyzeExecutionContext: ExecutionContext = akkaSystem.dispatchers.lookup("analyzer") - lazy val list: Seq[MispModule] = - loaderCommandOption.fold(Seq.empty[MispModule]) { 
loaderCommand ⇒ - Json.parse(s"$loaderCommand --list".!!) - .as[Seq[String]] - .map { moduleName ⇒ - moduleName → (for { - moduleInfo ← Try(Json.parse(s"$loaderCommand --info $moduleName".!!)) - module ← Try(moduleInfo.as[MispModule](reads(loaderCommand))) - } yield module) - } - .flatMap { - case (moduleName, Failure(error)) ⇒ - logger.warn(s"Load MISP module $moduleName fails", error) - Nil - case (_, Success(module)) ⇒ - logger.info(s"Register MISP module ${module.name} ${module.version}") - Seq(module) - } - } + logger.info(s"MISP modules is ${if (mispModulesEnabled) "enabled" else "disabled"}, loader is $loaderCommand") + + lazy val list: Seq[MispModule] = if (mispModulesEnabled) { + Json.parse(s"$loaderCommand --list".!!) + .as[Seq[String]] + .map { moduleName ⇒ + moduleName → (for { + moduleInfo ← Try(Json.parse(s"$loaderCommand --info $moduleName".!!)) + module ← Try(moduleInfo.as[MispModule](reads(loaderCommand, mispModuleConfig))) + } yield module) + } + .flatMap { + case (moduleName, Failure(error)) ⇒ + logger.warn(s"Load MISP module $moduleName fails", error) + Nil + case (_, Success(module)) ⇒ + logger.info(s"Register MISP module ${module.name} ${module.version}") + Seq(module) + } + } + else Nil def get(moduleName: String): Option[MispModule] = list.find(_.name == moduleName) @@ -89,30 +97,31 @@ class MispSrv( } def query(module: String, mispType: String, data: String)(implicit ec: ExecutionContext): Future[JsObject] = { - loaderCommandOption - .flatMap { loaderCommand ⇒ - val artifact = toArtifact(mispType, data) - get(module) - .map { mispModule ⇒ - val mispReport = Future { - val input = Json.obj(mispType → data) - val output = (s"$loaderCommand --run $module" #< input.toString).!! 
- Json.parse(output).as[JsObject] - } - jobSrv.create(mispModule, artifact, mispReport.map(toReport)) - mispReport - + val artifact = toArtifact(mispType, data) + val mispModule = if (mispModulesEnabled) { + get(module) + .map { mispModule ⇒ + val mispReport = Future { + val input = Json.obj("config" → mispModule.config, mispType → data) + val output = (s"$loaderCommand --run $module" #< input.toString).!! + Json.parse(output).as[JsObject] } - .orElse { - externalAnalyzerSrv - .get(module) - .map { analyzer ⇒ - externalAnalyzerSrv.analyze(analyzer, artifact) - .map { report ⇒ toMispOutput(report) } - } + jobSrv.create(mispModule, artifact, mispReport.map(toReport)) + mispReport + + } + } + else None + mispModule + .orElse { + externalAnalyzerSrv + .get(module) + .map { analyzer ⇒ + externalAnalyzerSrv.analyze(analyzer, artifact) + .map { report ⇒ toMispOutput(report) } } } - .getOrElse(Future.failed(new Exception(s"Module $module not found"))) + .getOrElse(Future.failed(new Exception(s"Module $module not found"))) // TODO add appropriate exception } def analyze(module: MispModule, artifact: Artifact): Future[Report] = { @@ -121,10 +130,13 @@ class MispSrv( val input = artifact match { case DataArtifact(data, _) ⇒ - stringStream(Json.obj(dataType2mispType(artifact.dataType).head → data).toString) + val mispType = dataType2mispType(artifact.dataType) + .filter(module.inputAttributes.contains) + .head + stringStream((Json.obj("config" → module.config) + (mispType → JsString(data))).toString) case FileArtifact(data, _) ⇒ new SequenceInputStream(Iterator( - stringStream("""{"attachment":""""), + stringStream(Json.obj("config" → module.config).toString.replaceFirst("}$", ""","attachment":"""")), new Base64InputStream(new FileInputStream(data), true), stringStream("\"}")).asJavaEnumeration) } @@ -207,15 +219,26 @@ class MispSrv( else mispTypes } - private def reads(loaderCommand: String): Reads[MispModule] = + private def reads(loaderCommand: String, mispModuleConfig: 
JsObject): Reads[MispModule] = for { name ← (__ \ "name").read[String] - version ← (__ \ "meta" \ "version").read[String] - description ← (__ \ "meta" \ "description").read[String] - author ← (__ \ "meta" \ "author").read[String] - config ← (__ \ "meta" \ "config").read[Seq[String]] + version ← (__ \ "moduleinfo" \ "version").read[String] + description ← (__ \ "moduleinfo" \ "description").read[String] + author ← (__ \ "moduleinfo" \ "author").read[String] + config = (mispModuleConfig \ name).asOpt[JsObject].getOrElse(JsObject(Nil)) + requiredConfig ← (__ \ "config").read[Set[String]] + missingConfig = requiredConfig -- config.keys + _ ← if (missingConfig.nonEmpty) { + val message = s"MISP module $name is disabled because the following configuration " + + s"item${if (missingConfig.size > 1) "s are" else " is"} missing: ${missingConfig.mkString(", ")}" + logger.warn(message) + Reads[Unit](_ ⇒ JsError(message)) + } + else { + Reads[Unit](_ ⇒ JsSuccess(())) + } input ← (__ \ "mispattributes" \ "input").read[Seq[String]] - dataTypes = input.map(mispType2dataType) + dataTypes = input.map(mispType2dataType).distinct } yield MispModule(name, version, description, author, dataTypes, input, config, loaderCommand) private val typeLookup = Map( diff --git a/build.sbt b/build.sbt index 75bf94c82..b3eff5f60 100644 --- a/build.sbt +++ b/build.sbt @@ -43,14 +43,21 @@ mappings in Universal ~= { file("package/cortex.service") -> "package/cortex.service", file("package/cortex.conf") -> "package/cortex.conf", file("package/cortex") -> "package/cortex", - file("package/logback.xml") -> "conf/logback.xml" + file("package/logback.xml") -> "conf/logback.xml", + file("contrib/misp-modules-loader.py") -> "contrib/misp-modules-loader.py" ) } // Package // -maintainer := "Thomas Franco val mappings = pm.mappings.filterNot { @@ -62,7 +69,7 @@ linuxPackageMappings ~= { _.map { pm => file("package/cortex.conf") -> "/etc/init/cortex.conf", file("package/cortex") -> "/etc/init.d/cortex", 
file("conf/application.sample") -> "/etc/cortex/application.conf", - file("conf/logback.xml") -> "/etc/cortex/logback.xml" + file("package/logback.xml") -> "/etc/cortex/logback.xml" ).withConfig() } @@ -125,7 +132,11 @@ dockerCommands ~= { dc => "apt-get install -y --no-install-recommends python-pip python2.7-dev ssdeep libfuzzy-dev libfuzzy2 libimage-exiftool-perl libmagic1 build-essential git && " + "cd /opt && " + "git clone https://github.com/CERT-BDF/Cortex-Analyzers.git && " + - "pip install $(sort -u Cortex-Analyzers/analyzers/*/requirements.txt)"), + "pip install $(sort -u Cortex-Analyzers/analyzers/*/requirements.txt) && " + + "apt-get install -y --no-install-recommends python3-setuptools python3-dev zlib1g-dev libxslt1-dev libxml2-dev libpq5 libjpeg-dev && git clone https://github.com/MISP/misp-modules.git && " + + "easy_install3 pip && " + + "(cd misp-modules && pip3 install -I -r REQUIREMENTS && pip3 install -I .) && " + + "rm -rf misp_modules /var/lib/apt/lists/* /tmp/*"), Cmd("ADD", "var", "/var"), Cmd("ADD", "etc", "/etc"), ExecCmd("RUN", "chown", "-R", "daemon:daemon", "/var/log/cortex")) ++ diff --git a/conf/application.sample b/conf/application.sample new file mode 100644 index 000000000..149437e4a --- /dev/null +++ b/conf/application.sample @@ -0,0 +1,153 @@ +# Secret key +# ~~~~~ +# The secret key is used to secure cryptographics functions. +# If you deploy your application to several instances be sure to use the same key! +#play.crypto.secret="***changeme***" + +analyzer { + path = "path/to/Cortex-Analyzers/analyzers" + config { + CIRCLPassiveDNS { + #user= "..." + #password= "..." + } + CIRCLPassiveSSL { + #user= "..." + #password= "..." + } + DNSDB { + #server="https://api.dnsdb.info" + #key="..." + } + DomainTools { + #username="..." + #key="..." + } + GoogleSafebrowsing { + #key = "..." + } + Hippocampe { + #url="..." + } + JoeSandbox { + #url = "..." + #apikey = "..." + } + Nessus { + #url ="..." + #login="..." + #password="..." 
+ #policy="..." + #ca_bundle="..." + #allowed_network="..." + } + OTXQuery { + #key="..." + } + PassiveTotal { + #key="..." + #username="..." + } + PhishingInitiative { + #key="..." + } + PhishTank { + #key="..." + } + Virusshare { + #path = "..." + } + VirusTotal { + #key="..." + } + Yara { + #rules=["..."] + } + } + + fork-join-executor { + # Min number of threads available for analyze + parallelism-min = 2 + # Parallelism (threads) ... ceil(available processors * factor) + parallelism-factor = 2.0 + # Max number of threads available for analyze + parallelism-max = 4 + } +} + +misp.modules { + enabled = true + + config { + shodan { + #apikey = "" + } + eupi { + #apikey = "" + #url = "" + } + passivetotal { + #username = "" + #api_key = "" + } + dns { + #nameserver = "" + } + whois { + #server = "" + #port = "" + } + sourcecache { + #archivepath = "" + } + geoip_country { + } + circl_passivessl { + #username = "" + #password = "" + } + iprep { + #apikey = "" + } + countrycode { + } + cve { + } + virustotal { + #apikey = "" + #event_limit = "" + } + ipasn { + #host = "" + #port = "" + #db = "" + } + circl_passivedns { + #username = "" + #password = "" + } + vmray_submit { + #apikey = "" + #url = "" + #shareable = "" + #do_not_reanalyze = "" + #do_not_include_vmrayjobids = "" + } + wiki { + } + domaintools { + #username = "" + #api_key = "" + } + reversedns { + #nameserver = "" + } + threatminer { + } + asn_history { + #host = "" + #port = "" + #db = "" + } + } +} diff --git a/conf/reference.conf b/conf/reference.conf index 8484a2429..229d9624d 100644 --- a/conf/reference.conf +++ b/conf/reference.conf @@ -1,6 +1,12 @@ # handler for errors (transform exception to related http status code play.http.errorHandler = services.ErrorHandler +# MISP modules loader location +misp.modules { + enabled = false + loader = ${play.server.dir}/"contrib/misp-modules-loader.py" +} + analyzer { # Directory that holds analyzers path = analyzers @@ -8,7 +14,7 @@ analyzer { config { 
dummy = dummy } - + fork-join-executor { # Min number of threads available for analyze parallelism-min = 2 diff --git a/contrib/misp-modules-loader.py b/contrib/misp-modules-loader.py index c265aaaf8..be5d1fea6 100755 --- a/contrib/misp-modules-loader.py +++ b/contrib/misp-modules-loader.py @@ -17,51 +17,61 @@ """ -def run(argv): +def usage(): + print(__file__ + " --list") + print(__file__ + " --info ") + print(__file__ + " --run ") + +def run(argv): mhandlers, modules = misp_modules.load_package_modules() try: - opts, args = getopt.getopt(argv, 'lh:i:r:', ["list", "help", "info=","run="]) + opts, args = getopt.getopt(argv, 'lh:i:r:', ["list", "help", "info=", "run="]) except getopt.GetoptError as err: - print(__file__ + " --info ") - print(__file__ + " --run ") + usage() print(str(err)) sys.exit(2) - module = None - path = None - for opt,arg in opts: + for opt, arg in opts: # TODO: check if module exist else exit if opt in ('-h', '--help'): - print(__file__ + " --info ") - print(__file__ + " --run ") + usage() sys.exit() elif opt in ('-l', '--list'): - modules = [m for m in modules if mhandlers['type:' + m ] == "expansion"] + modules = [m for m in modules if mhandlers['type:' + m] == "expansion"] print(json.dumps(modules)) sys.exit(0) elif opt in ('-r', '--run'): - module = arg + module_name = arg + try: data = json.load(sys.stdin) - print(json.dumps(mhandlers[module].handler(json.dumps(data)))) - sys.exit(0) - - elif opt in ('-i','--info'): - module = arg - - print(json.dumps({'name': module, 'mispattributes': mhandlers[module].mispattributes, - 'moduleinfo':mhandlers[module].moduleinfo})) + print(json.dumps(mhandlers[module_name].handler(json.dumps(data)))) + except: + error = {'error': sys.exc_info()[1].args[0]} + print(json.dumps(error)) + sys.exit(0) + elif opt in ('-i', '--info'): + module_name = arg + try: + config = mhandlers[module_name].moduleconfig + except AttributeError: + config = [] + print(json.dumps({ + 'name': module_name, + 'mispattributes': 
mhandlers[module_name].mispattributes, + 'moduleinfo': mhandlers[module_name].moduleinfo, + 'config': config + })) if __name__ == '__main__': if len(sys.argv[1:]) > 0: run(sys.argv[1:]) else: - print(__file__ + " --info ") - print(__file__ + " --run ") + usage() sys.exit(2) diff --git a/docs/FAQ.md b/docs/FAQ.md deleted file mode 100644 index 6d2fbfd22..000000000 --- a/docs/FAQ.md +++ /dev/null @@ -1,72 +0,0 @@ -# Analyzers -- [What version of MaxMind TheHive uses?](https://github.com/CERT-BDF/Cortex/wiki/FAQ#what-version-of-maxmind-cortex-uses) -- [How often are the MaxMind databases refreshed?](https://github.com/CERT-BDF/Cortex/wiki/FAQ#how-often-are-the-databases-refreshed) -- [How shall I configure the MaxMind analyzer?](https://github.com/CERT-BDF/Cortex/wiki/FAQ#how-shall-i-configure-the-maxmind-analyzer) -- [Can I use the commercial versions of the databases?](https://github.com/CERT-BDF/Cortex/wiki/FAQ#can-i-use-the-commercial-versions-of-the-databases) - -## MaxMind -### What version of MaxMind Cortex uses? -The MaxMind analyzer includes the GeoLite2 free City and Country databases. - -### How often are the MaxMind databases refreshed? -Cortex does not refresh those databases. It is up to you to create a cron job to refresh them at the frequency you want. The files to update are: -- `analyzers/MaxMind/GeoLite2-City.mmdb` -- `analyzers/MaxMind/GeoLite2-Country.mmdb` - -You can fetch up-to-date versions from . - -### How shall I configure the MaxMind analyzer? -No configuration is required. If it looks like the analyzer is not working, please clear the cache of your browser and retry. If it still doesn't work, please join [TheHive User Discussion Forum](https://groups.google.com/a/thehive-project.org/d/forum/users) or [open an issue on GitHub](https://github.com/CERT-BDF/Cortex-analyzers/issues/new). - -### Can I use the commercial versions of the databases? -The current version of Cortex does not offer that possibility. 
If you'd like to have it, please [request it](https://github.com/CERT-BDF/Cortex-analyzers/issues/new). - -# Authentication -- [Does Cortex support authentication?](https://github.com/CERT-BDF/Cortex/wiki/FAQ/does-cortex-support-authentication) -- [How can I make sure that only authorized users get access to Cortex?](https://github.com/CERT-BDF/Cortex/wiki/FAQ/how-can-i-make-sure-that-only-authorized-users-get-access-to-cortex) -- [How can I make sure that only authorized services get access to the Cortex API?](https://github.com/CERT-BDF/Cortex/wiki/FAQ/how-can-i-make-sure-that-only-authorized-services-get-access-to-cortex-api) - -### Does Cortex support authentication? -No. Cortex 1 does not support authentication. Cortex 2, slated for September 2017, [will support local, LDAP and AD authentication](https://github.com/CERT-BDF/Cortex/issues/7). - -### How can I make sure that only authorized users get access to Cortex? -Cortex does not currently support authentication. The next major version (v2), slated for September 2017, [will implement it](https://github.com/CERT-BDF/Cortex/issues/7). In the meantime, you should either install an authenticating reverse proxy in front of Cortex or limit access to it using a firewall or an alternative filtering device. - -If you do not protect your Cortex instance, anyone who has access to your network may run jobs or retrieve existing reports. - -### How can I make sure that only authorized services get access to Cortex API? -Cortex does not currently support service authentication or API keys. The next major version (v2), slated for September 2017, [will implement it](https://github.com/CERT-BDF/Cortex/issues/7). - -Any service may query Cortex without authentication. If you need to let only authorized services get access to your instance(s), make sure to use a filtering device and authorize only the IP addresses of those services. 
- -# Miscellaneous Questions -- [Can I Enable HTTPS to Connect to Cortex?](https://github.com/CERT-BDF/Cortex/wiki/FAQ#can-i-enable-https-to-connect-to-cortex) - -### Can I Enable HTTPS to Connect to Cortex? -#### TL;DR -Add the following lines to `/etc/cortex/application.conf` - - https.port: 9443 - play.server.https.keyStore { - path: "/path/to/keystore.jks" - type: "JKS" - password: "password_of_keystore" - } - -HTTP can disabled by adding line `http.port=disabled` -#### Details -To enable HTTPS in the application, add the following lines to `/etc/cortex/application.conf`: -``` - https.port: 9443 - play.server.https.keyStore { - path: "/path/to/keystore.jks" - type: "JKS" - password: "password_of_keystore" - } -``` -As HTTPS is enabled HTTP can be disabled by adding `http.port=disabled` in configuration. - -To import your certificate in the keystore, depending on your situation, you can follow [Digital Ocean's tutorial](https://www.digitalocean.com/community/tutorials/java-keytool-essentials-working-with-java-keystores). - -**More information**: -This is a setting of the Play framework that is documented on its website. Please refer to [https://www.playframework.com/documentation/2.5.x/ConfiguringHttps](https://www.playframework.com/documentation/2.5.x/ConfiguringHttps). \ No newline at end of file diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index f8eb06819..000000000 --- a/docs/README.md +++ /dev/null @@ -1,31 +0,0 @@ -Cortex tries to solve a common problem frequently encountered by SOCs, CSIRTs and security researchers in the course of threat intelligence, digital forensics and incident response: how to analyze observables they have collected, **at scale, by querying a single tool** instead of several? - -## Hardware pre-requisites -Cortex uses a Java VM. We recommend using a virtual machine with 8vCPU, 8 GB of RAM and 10 GB of disk. You can also use a -physical machine with similar specifications. 
- -## What's new - -- [Changelog](/CHANGELOG.md) - -## Installation guides - -Cortex can be installed using: -- An [RPM package](installation/rpm-guide.md) -- A [DEB package](installation/deb-guide.md) -- [Docker](installation/docker-guide.md) -- [Binary](installation/binary-guide.md) -- [Ansible script](https://github.com/drewstinnett/ansible-cortex) contributed by -[@drewstinnett](https://github.com/drewstinnett) - -Cortex can also be [built from sources](installation/build-guide.md). - -Once you have installed Cortex, you need to [install the analyzers](installation/analyzers.md). - -## Developer guides - -- [API documentation](api/README.md) -- [How to create an analyzer](api/how-to-create-an-analyzer.md) - -## Other -- [FAQ](FAQ.md) diff --git a/docs/admin/configuration.md b/docs/admin/configuration.md deleted file mode 100644 index 627e1a55b..000000000 --- a/docs/admin/configuration.md +++ /dev/null @@ -1,31 +0,0 @@ -# Configuration - -Cortex back-end and analyzers can find their configuration in the same file. - -The only required parameter in order to start Cortex is the key of the server (`play.crypto.secret`). This key is used -to authenticate cookies that contain data, and not only a session id. If Cortex runs in cluster mode, all instance must -share the same key. - -You should generate a random key using the following command line: - -``` -sudo mkdir /etc/cortex -(cat << _EOF_ -# Secret key -# ~~~~~ -# The secret key is used to secure cryptographics functions. -# If you deploy your application to several instances be sure to use the same key! -play.crypto.secret="$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1)" -_EOF_ -) | sudo tee -a /etc/cortex/application.conf - -``` - -Please, note that this secret key is mandatory to start Cortex application. With this configuration, you will only be -able to run analyzers that do not require any configuration parameter, an API key for instance. 
To configure other -analyzers, refer to [analyzers](../installation/analyzers.md). - -**Warning**: By default, Cortex run an HTTP service on port `9000/tcp`. You can change the port by adding -`http.port=8080` in the configuration file or add the `-Dhttp.port=8080` parameter to the command line below. If you run -Cortex using a non-privileged user, you can't bind a port under 1024. If you run TheHive on the same system beware to -use two different TCP ports. diff --git a/docs/api/README.md b/docs/api/README.md deleted file mode 100644 index 164c5e279..000000000 --- a/docs/api/README.md +++ /dev/null @@ -1,23 +0,0 @@ - -If you are using [TheHive](https://github.com/CERT-BDF/TheHive) as a SIRP (Security Incident Response Platform), you -don't need to master the Cortex REST API. If you have a different SIRP or would like to interface other tools with -Cortex, please read on. - -## TL;DR -The current Cortex version doesn't require authentication and all API call results are provided in JSON format. - -## Available API Calls - -- [List analyzers](list-analyzers.md) -- [Get an analyzer's definition](get-analyzer.md) -- [List analyzers for a given datatype](get-analyzer-by-type.md) -- [Run an analyzer](run-analyzer.md) -- [List jobs](list-jobs.md) -- [Get a job definition](get-job.md) -- [Delete a job](delete-job.md) -- [Get a job report](get-job-report.md) -- [Wait and get a job report](wait-and-get-job-report.md) - -## How to create an analyzer - -If you want to create an analyzer, follow this [guide](how-to-create-an-analyzer.md). \ No newline at end of file diff --git a/docs/api/delete-job.md b/docs/api/delete-job.md deleted file mode 100644 index b48667007..000000000 --- a/docs/api/delete-job.md +++ /dev/null @@ -1,26 +0,0 @@ -This API call lets you delete an existing job, identified by its ID. - -**URL** -``` -DELETE /api/job/ -``` - -`JOB_ID` must be a valid job `id`. - -**Output** - -This API call doesn't produce any output. 
- -**Response codes** - -| Status Code | Description | -| ------------ | ------------- | -| 200 | The deletion has been made successfully | -| 404 | **TBD**: The job is unknown | -| 500 | An expected error occurred | - -**How to use it** - -``` -curl -XDELETE http://:/api/job/ -``` diff --git a/docs/api/get-analyzer-by-type.md b/docs/api/get-analyzer-by-type.md deleted file mode 100644 index edfd2d05f..000000000 --- a/docs/api/get-analyzer-by-type.md +++ /dev/null @@ -1,56 +0,0 @@ -This API call returns the list of all the analyzers that can act upon a given datatype (IP address, hash, domain...). - -**URL** -``` -GET /api/analyzer/type/ -``` - -where `DATATYPE` is a valid observable datatype: ip, url, domain, and so on. - -**Output** - -Returns a JSON array representing a list of all the analyzers that can act upon that specific datatype. Each entry includes the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The analyzer's identifier | -| name | String| The analyzer's name | -| version | String | The analyzer's version | -| dataTypeList | String[] | An array of the observable datatypes that the analyzer can act upon | - -*Example* - -```json -[ - { - "name": "Abuse_Finder", - "version": "1.0", - "description": "Use CERT-SG's Abuse Finder to find the abuse contact associated with domain names, URLs, IP and email addresses.", - "dataTypeList": [ - "ip", - "domain", - "url", - "email" - ], - "id": "Abuse_Finder_1_0" - }, - { - "name": "HippoMore", - "version": "1.0", - "description": "Hippocampe detailed report: provides the last detailed report for an IP, domain or a URL", - "dataTypeList": [ - "ip", - "domain", - "fqdn", - "url" - ], - "id": "HippoMore_1_0" - } -] -``` - -**How to use it** - -``` -curl http://:/api/analyzer/type/domain -``` \ No newline at end of file diff --git a/docs/api/get-analyzer.md b/docs/api/get-analyzer.md deleted file mode 100644 index f12de90dc..000000000 --- 
a/docs/api/get-analyzer.md +++ /dev/null @@ -1,39 +0,0 @@ -This API call returns the details of a given analyzer when you supply its ID. If you don't know the ID of the analyzer, you can get a list of all the available analyzers and the corresponding IDs by referring to the [List analyzers](List-analyzers-API) page. - -**URL** -``` -GET /api/analyzer/ -``` - -`ANALYZER_ID` should be a valid analyzer `id`. - -**Output** - -Returns a JSON object representing an analyzer, with the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The analyzer's identifier | -| name | String| The analyzer's name | -| version | String | The analyzer's version | -| dataTypeList | String[] | An array of the observable datatypes that the analyzer can act upon | - -*Example* - -```json -{ - "name": "File_Info", - "version": "1.0", - "description": "Parse files in several formats such as OLE and OpenXML to detect VBA macros, extract their source code, generate useful information on PE, PDF files and much more.", - "dataTypeList": [ - "file" - ], - "id": "File_Info_1_0" -} -``` - -**How to use it** - -``` -curl http://:/api/analyzer/ -``` \ No newline at end of file diff --git a/docs/api/get-job-report.md b/docs/api/get-job-report.md deleted file mode 100644 index 30f335fb5..000000000 --- a/docs/api/get-job-report.md +++ /dev/null @@ -1,100 +0,0 @@ -This API call returns the details and report of a given job, identified by its ID. - -**URL** -``` -GET /api/job//report -``` - -`JOB_ID` must be a valid job `id`. 
- -**Output** - -Returns a JSON object representing a job, with the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The job's id | -| analyzerId | String| The analyzer's id | -| status | String | The job's status: `Success`, `InProgress` or `Failure` | -| date | Number | A timestamp which represent the job's start date | -| artifact | Object | The observable details | -| report | `` Object | The job report | - - -The `` could be any JSON object, but Cortex uses some conventions. The structure of the `` object as defined by Cortex is described below: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| success | Boolean | True if the job is successful, False if it failed | -| errorMessage | String | Contains the error message if the job failed | -| summary | Object | A custom JSON object with any content (based on the analyzer) | -| artifacts | ``[] | An array of the artifacts extracted from the analysis | -| full | Object | A custom JSON object with any content (based on the analyzer). Represents the full analysis report | - - -The `` is an object representing an observable and has two attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| type | String | The artifact's datatype (url, hash, ip, domain...) 
| -| value | String | The observable's value | - - -*Example* - -```json -{ - "id": "vVQu93ps4PwHOtLv", - "analyzerId": "File_Info_1_0", - "status": "Success", - "date": 1490204071457, - "artifact": { - "attributes": { - "dataType": "file", - "tlp": 2, - "content-type": "text/x-python-script", - "filename": "sample.py" - } - }, - "report": { - "artifacts": [ - { - "type": "sha1", - "value": "cd1c2da4de388a4b5b60601f8b339518fe8fbd31" - }, - { - "type": "sha256", - "value": "fd1755c7f1f0f85597cf2a1f13f5cbe0782d9e5597aca410da0d5f26cda26b97" - }, - { - "type": "md5", - "value": "3aa598d1f0d50228d48fe3a792071dde" - } - ], - "full": { - "Mimetype": "text/x-python", - "Identification": { - "ssdeep": "24:8ca1hbLcd8yutXHbLcTtvbrbLcvtEbLcWmtlbLca66/5:8zHbLcdbOXbLc5jrbLcVEbLcPlbLcax", - "SHA1": "cd1c2da4de388a4b5b60601f8b339518fe8fbd31", - "SHA256": "fd1755c7f1f0f85597cf2a1f13f5cbe0782d9e5597aca410da0d5f26cda26b97", - "MD5": "3aa598d1f0d50228d48fe3a792071dde" - }, - "filetype": "python script", - "Magic": "Python script, ASCII text executable", - "Exif": { - "ExifTool:ExifToolVersion": 10.36 - } - }, - "success": true, - "summary": { - "filetype": "python script" - } - } -} -``` - -**How to use it** - -``` -curl http://:/api/job//report -``` \ No newline at end of file diff --git a/docs/api/get-job.md b/docs/api/get-job.md deleted file mode 100644 index 321e6e200..000000000 --- a/docs/api/get-job.md +++ /dev/null @@ -1,44 +0,0 @@ -This API call returns the details of a given job, identified by its ID. It doesn't include the job's report, which can be fetched using the [Get job report API](Get-job-report-API). - -**URL** -``` -GET /api/job/ -``` - -`JOB_ID` must be a valid job `id`. 
- -**Output** - -Returns a JSON object representing a job with the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The job's id | -| analyzerId | String| The analyzer's id | -| status | String | The job's status: `Success`, `InProgress` or `Failure` | -| date | Number | A timestamp which represents the job's start date | -| artifact | Object | The observable details | - -*Example* - -```json -{ - "id": "c9uZDbHBf32DdIVJ", - "analyzerId": "MaxMind_GeoIP_2_0", - "status": "Success", - "date": 1490194495262, - "artifact": { - "data": "8.8.8.8", - "attributes": { - "dataType": "ip", - "tlp": 2 - } - } -} -``` - -**How to use it** - -``` -curl http://:/api/job/ -``` \ No newline at end of file diff --git a/docs/api/how-to-create-an-analyzer.md b/docs/api/how-to-create-an-analyzer.md deleted file mode 100644 index 9b10a4aee..000000000 --- a/docs/api/how-to-create-an-analyzer.md +++ /dev/null @@ -1,357 +0,0 @@ -# Overview -The main goal of Cortex is to run analysis on a given observable, defined by its data type, and a value. This observable could be of any type: IP, domain, URL, email, file... - -The programs that Cortex invokes to analyze observables are called **Analyzers**. An **analyzer** could be written in any programming language supported by Linux as long as the resulting program is on the same machine as Cortex and is executable. - -# Create a Basic Analyzer -From a technical standpoint, a minimal **analyzer** would be defined by: -- A JSON definition file -- An executable script. As of this writing, all the available analyzers are written in Python. However, analyzers can be written in any programming language supported by Linux - -Throughout this document, we will use the [Hippocampe_More](https://github.com/CERT-BDF/Cortex-Analyzers/tree/master/analyzers/Hippocampe) analyzer as an example to teach you how to write your own analyzer. 
- -Our **analyzer** will be defined inside a folder called `Hippocampe`. We use a convention. The folder where the analyzer is located is named after the product or service it leverages to do its work: MISP, MaxMind, PassiveTotal, VirusTotal, DomainTools... - -## The JSON Definition File -As its name implies, the JSON definition file contain metadata describing the analyzer. The associated filename must be `_.json`. For example: `Hippocampe_more.json` and `Hippocampe_hipposcore.json`. - -The structure of this file is described in the table below: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| name | String, `REQUIRED` | The analyzer's name | -| version | String `REQUIRED` | The analyzer's version | -| description | String `REQUIRED`| The analyzer's description | -| dataTypeList | String[] `REQUIRED` | An array of strings, listing the observable data types that could be analyzed | -| command | String `REQUIRED` | The command to invoke the analyzer's script. It must be relative to the root directory that contain all the analyzer folders | -| baseConfig* | String `OPTIONAL` | The name of the configuration attribute defined in the Cortex configuration file | -| config* | Object `OPTIONAL` | A configuration object that will be passed to the analyzer's command | - -**Note**: Fields marked with a star (*) will be described in depth later in this document. - -**Example** -```json -{ - "name": "HippoMore", - "version": "1.0", - "description": "Hippocampe detailed report: provides the last detailed report for an IP, domain or a URL", - "dataTypeList": ["ip", "domain", "fqdn", "url"], - "baseConfig": "Hippocampe", - "config": { - "check_tlp": false, - "max_tlp":3, - "service": "more" - }, - "command": "Hippocampe/hippo.py" -} -``` - -## The Script -The analyzer script must be an executable script that Cortex runs using the `command` provided within the JSON definition file. 
The script can be written in any programming language, as long as it can be executed using a shell command.
-
-When running the analyzer's script file, Cortex provides some input data through the standard input, and expects an output through the standard output.
-
-### Analyzer Input
-In Cortex, we distinguish between two types of observables:
-- Value-based observables
-- File-based observables
-
-The input sent by Cortex to the analyzers depends on the observable type.
-
-*Note*: when using Cortex with [TheHive](https://github.com/CERT-BDF/TheHive/), we use some output conventions that allow us to normalize the way TheHive displays the analysis reports.
-
-#### Input for Value-based Observables
-The input for value-based observables must have the following structure:
-
-```json
-{
-  "dataType": "ip",
-  "data": "8.8.8.8",
-  "config": {}
-}
-```
-
-The following table explains the JSON schema of the input:
-
-| Attribute | Type | Description |
-| ------------ | ------------- | ------------- |
-| dataType | String, `REQUIRED` | The observable's type |
-| data | String `REQUIRED` | The observable's value |
-| config* | Object `OPTIONAL` | A config object, representing the analyzer's options and parameters if any |
-
-
-#### Input for File-based Observables
-The input for file-based observables must have the following structure:
-```json
-{
-  "dataType": "file",
-  "attachment": {
-    "name": ""
-  },
-  "file": "/path/to/file/observable",
-  "config": {}
-}
-```
-
-The following table explains the JSON schema of the input:
-
-| Attribute | Type | Description |
-| ------------ | ------------- | ------------- |
-| dataType | String, `REQUIRED` | The observable's type (`file` in this case) |
-| file | String `REQUIRED` | The observable's file path |
-| attachment.name | String `OPTIONAL` | The observable's file name |
-| config* | Object `OPTIONAL` | A config object, representing the analyzer's options and parameters if any |
-
-#### The Analyzer Configuration Object
-An
analyzer can define a default configuration object in its JSON definition file. Cortex can override or add configuration properties using Cortex's configuration file.
-
-Based on that, the `config` object passed to the analyzer's script results from the merge operation of three objects:
-- the `config` object defined in the analyzer's JSON definition file (defines the default values of the analyzer's config)
-- the `baseConfig` object defined in Cortex's configuration file using the `analyzer.config.` property (used to hold sensitive properties like API keys or credentials)
-- the *global* analyzers configuration defined in Cortex's configuration file using `analyzer.config.global` (generally contains proxy configuration information)
-
-
-### Analyzer Output
-The output from Cortex could technically be any JSON object. That said, Cortex's UI might rely on a specific attribute to decide if the job failed or succeeded. The property is named `success` and must be a Boolean value.
-
-In the existing analyzers, we tried to stick to some conventions, using the formats defined below.
- -#### Successful Analysis -```json -{ - "success": true, - "summary": {}, - "artifacts": [], - "full": {} -} -``` - -The following table explains the JSON conventions of the output: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| success | Boolean `REQUIRED` | The analysis success flag | -| summary | Object `OPTIONAL` | The analysis summary: a small report | -| full | Object `REQUIRED` | The analysis complete report | -| artifacts | Array[``] `OPTIONAL` | An array of artifacts discovered by the analyzer | - -The `` object has the following structure: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| type | String `REQUIRED` | The artifact data type | -| value | String `REQUIRED` | The artifact value | - -**Note**: the `artifacts` array will be used in the future by [TheHive](https://github.com/CERT-BDF/TheHive/) to display or import the extracted artifacts from an analysis report. - -#### Unsuccessful Analysis -```json -{ - "success": false, - "errorMessage": "" -} -``` - -# The Cortexutils Library -`cortexutils` is a Python library available on `pip`. It provides a Python class that facilitates the creation of analyzer script files. It includes an abstract `Analyzer` class that a programmer may inherit and override in their script. It also provides some methods to quickly format the output to be compliant with the JSON schema expected by [TheHive](https://github.com/CERT-BDF/TheHive/). - -To create an analyzer class, developers have to: - -1. Create a subclass of `cortexutils.analyzer.Analyzer` -2. Override the constructor, call the super constructor and if needed, read the specific analyzer's options (read specific configuration properties from the config object) -3. Override the `run` method. It must either return a report, using the `report` method, or an error using the `error`method. If `run`is not overridden, the analyzer returns an empty report -3. 
Optionally override the `summary` method. It should return a JSON object representing a summary of the analyzer report. If not overridden, the analyzer returns an empty summary -3. Optionally override the `artifacts` method. It should return a JSON array representing a list of `artifact` objects (as described above). If not overridden, the analyzer returns the result of an `ioc-parser`, ran over the full JSON report. - -Below is an example of a basic analyzer that can handle IPs and domains: - -```python -#!/usr/bin/env python -# encoding: utf-8 - -from cortexutils.analyzer import Analyzer - -# Define analyzer's class -class BasicExampleAnalyzer(Analyzer): - # Analyzer's constructor - def __init__(self): - # Call the constructor of the super class - Analyzer.__init__(self) - - # Read specific config options - self.optional_prop = self.getParam('config.optional_prop', '') - self.required_prop = self.getParam('config.required_prop', None, 'Error: Missing required_prop') - - # Override the report method. 
This is the analyzer's entry point - def run(self): - # Put your analyzer's logic here - result = {} - - # This is just an example - if self.data_type == 'ip': - result['findings'] = ['1.1.1.1', '2.2.2.2', '3.3.3.3'] - elif self.data_type == 'domain': - result['findings'] = ['domain1.com', 'domain2.com', 'domain3.com'] - else: - return self.error('Unsupported observable data type') - - # Return the report - return self.report(result) - - # Override the summary method - def summary(self, raw_report): - return { - 'count': len(raw_report['findings']) - } - - # Override the artifacts method - def artifacts(self, raw_report): - result = [] - if 'findings' in raw_report: - for item in raw_report['findings']: - result.append({'type': self.data_type, 'value': item}) - - return result - -# Invoke the analyzer -if __name__ == '__main__': - BasicExampleAnalyzer().run() - -``` - -To call this analyzer, we can run the following command: - -``` -python sample-analyzer.py <<< '{ - "dataType":"ip", - "data": "8.8.8.8", - "config":{ - "required_prop": "anyvalue" - } -}' -``` - -This will generate the following output: - -```json -{ - "success" : true, - "artifacts" : [ - { - "value" : "1.1.1.1", - "type" : "ip" - }, - { - "value" : "2.2.2.2", - "type" : "ip" - }, - { - "value" : "3.3.3.3", - "type" : "ip" - } - ], - "summary" : { - "count" : 3 - }, - "full" : { - "findings" : [ - "1.1.1.1", - "2.2.2.2", - "3.3.3.3" - ] - } -} -``` - -And in Cortex ![](../images/cortex-report.png) - -# TheHive and Cortex analyzers -Using Cortex from an instance of [TheHive](https://github.com/CERT-BDF/TheHive/) helps the users improve the analysis report visualization. In fact, TheHive uses the outputs generated from Cortex analyzers in two ways: - -- Store the `summary` content as part of the observable's data. This is available for successful analysis jobs only. -- Display the `full`report using the report templates defined within TheHive. 
- -## Report templates -[TheHive](https://github.com/CERT-BDF/TheHive/) is based on Angular 1 and report templates have to be Angular templates which we try to fill using the job's report data. - -We distinguish 2 types of report templates: - -### Short reports -Generates what we call **mini reports**, to be displayed in the observable's details page and observables list. Short report templates receive the following data: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| name | String | Analyzer's ID | -| content | Object | The job report's `summary` object | -| artifact | Object | The observable details, as stored in TheHive | - -For example, if we want to create a short report template for the `BasicExampleAnalyzer`, we could write the following HTML short report file: - -```html -Basic: {{content.count || 0}} record(s) -``` - -`content` being the following: - -```json -{ - "count" : 3 -} -``` - -The result in TheHive will be ![](../images/short-report.png) - -### Long reports -Like short reports, the long reports are used to render the content of the `full` attribute of a job JSON report. - -Long report templates receive the following data: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| name | String | Analyzer's ID | -| status | String | The job's status: `Success`, `Failure`, `InProgress` | -| success | Boolean | The job's success status | -| content | Object | The job report's `full` object | -| artifact | Object | The observable details, as stored in TheHive | - - -For example, if we want to create a long report template for the `BasicExampleAnalyzer`, we could write the following HTML long report file: - -```html - -
-
- {{name}} -
-
-
{{content.findings.length}} {{artifact.dataType | uppercase}}(s) found for {{artifact.data | fang}}
-
    -
  • {{finding}}
  • -
-
-
- - -
-
- {{artifact.data | fang}} -
-
- {{content.errorMessage}} -
-
-``` - -`content` being the following: - -```json -{ - "findings" : [ - "1.1.1.1", - "2.2.2.2", - "3.3.3.3" - ] -} -``` - -The result in TheHive will be ![](../images/long-report.png) \ No newline at end of file diff --git a/docs/api/list-analyzers.md b/docs/api/list-analyzers.md deleted file mode 100644 index d1341a393..000000000 --- a/docs/api/list-analyzers.md +++ /dev/null @@ -1,51 +0,0 @@ -This API call returns the list of all the analyzers enabled within Cortex. - -**URL** -``` -GET /api/analyzer -``` - -**Output** - -Returns a JSON array representing a list of all the enabled analyzers. Each entry includes the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The analyzer's identifier | -| name | String| The analyzer's name | -| version | String | The analyzer's version | -| dataTypeList | String[] | An array of the observable datatypes that the analyzer can act upon | - -*Example* - -```json -[ - { - "name": "File_Info", - "version": "1.0", - "description": "Parse files in several formats such as OLE and OpenXML to detect VBA macros, extract their source code, generate useful information on PE, PDF files and much more.", - "dataTypeList": [ - "file" - ], - "id": "File_Info_1_0" - }, - { - "name": "HippoMore", - "version": "1.0", - "description": "Hippocampe detailed report: provides the last detailed report for an IP, domain or a URL", - "dataTypeList": [ - "ip", - "domain", - "fqdn", - "url" - ], - "id": "HippoMore_1_0" - } -] -``` - -**How to use it** - -``` -curl http://:/api/analyzer -``` \ No newline at end of file diff --git a/docs/api/list-jobs.md b/docs/api/list-jobs.md deleted file mode 100644 index 797be3e43..000000000 --- a/docs/api/list-jobs.md +++ /dev/null @@ -1,95 +0,0 @@ -This API call returns a list of analysis jobs. 
- -**URL** -``` -GET /api/job -``` - -**Input** - -This API call supports a list of filters and pagination parameters can be provided in the query: - -| Query Parameter | Default value | Description | -| ------------ | ------------- | ------------- | -| dataTypeFilter | Empty | A datatype value: ip, domain, hash etc... | -| dataFilter | Empty | A string representing a part of an observable value. Could be an IP or part of an IP, a domain, url and so on | -| analyzerFilter | Empty | An analyzer's ID | -| start | 0 | A number representing the index of the page start | -| limit | 10 | A number representing a page size | - -*Example* -``` -GET /api/job?analyzerFilter=Abuse_Finder_1_0&dataTypeFilter=domain&dataFilter=.com&start=0&limit=50 -``` - -should return the list of Abuse_Finder jobs corresponding to domains which include `.com`. - -**Output** - -Returns a JSON array representing a list of jobs. Each entry includes the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The job's id | -| analyzerId | String| The analyzer's id | -| status | String | The job's status: `Success`, `InProgress` or `Failure` | -| date | Number | A timestamp which corresponds to the job's start date | -| artifact | Object | The observable details | - -*Example* - -```json -[ - { - "id": "OsmbnQJGmeCgvDxP", - "analyzerId": "OTXQuery_1_0", - "status": "Failure", - "date": 1490194495264, - "artifact": { - "data": "8.8.8.8", - "attributes": { - "dataType": "ip", - "tlp": 2 - } - } - }, - { - "id": "c9uZDbHBf32DdIVJ", - "analyzerId": "MaxMind_GeoIP_2_0", - "status": "Success", - "date": 1490194495262, - "artifact": { - "data": "8.8.8.8", - "attributes": { - "dataType": "ip", - "tlp": 2 - } - } - }, - { - "id": "OcFlZbLNNUsIiJZq", - "analyzerId": "HippoMore_1_0", - "status": "InProgress", - "date": 1490194495259, - "artifact": { - "data": "8.8.8.8", - "attributes": { - "dataType": "ip", - "tlp": 2 - } - } - } -] -``` - 
-**How to use it** - -``` -curl http://:/api/job -``` - -or - -``` -curl 'http://:/api/job?start=0&limit=100' -``` \ No newline at end of file diff --git a/docs/api/run-analyzer.md b/docs/api/run-analyzer.md deleted file mode 100644 index 836063002..000000000 --- a/docs/api/run-analyzer.md +++ /dev/null @@ -1,138 +0,0 @@ -The API calls described below will let you run analyzers on one observable at a time. Two types of observables can be analyzed: -- Value-based: generally string values such as IP addresses, domains, hashes and so on. -- File-based: the file that you'd like to analyze must be submitted. - -## Analyze Value-based Observables - -**URL** -``` -POST /api/analyzer//run -``` - -`ANALYZER_ID` must be a valid analyzer `id`. - -**Input** - -This API call requires a JSON POST body describing an observable and the following attributes: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| data | String | The observable's value | -| attributes | ``Object | The observable's attributes | - - -The `` object structure is the following: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| dataType | String | The observable's data type | -| tlp | Number | The observable's TLP: 0 for `WHITE`, 1 for `GREEN`, 2 for `AMBER`, 3 for `RED` | - -*Example* - -``` -curl -XPOST -H 'Content-Type: application/json' http://:/api/analyzer/Hipposcore_1_0/run -d '{ - "data":"mydomain.com", - "attributes":{ - "dataType":"domain", - "tlp":2 - } -}' -``` - -This returns the details of the created analysis job. 
- -**Output** - -Returns a JSON object representing the started analysis job: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The job's ID | -| analyzerId | String| The analyzer's ID | -| status | String | The job's status: `Success`, `InProgress` or `Failure` | -| date | Number | A timestamp which represents the job's start date | -| artifact | Object | The observable details | - -*Example* - -```json -{ - "id": "ymlrxZB8efyZhFEg", - "analyzerId": "Hipposcore_1_0", - "status": "Success", - "date": 1490263456480, - "artifact": { - "data": "mydomain.com", - "attributes": { - "dataType": "domain", - "tlp": 2 - } - } -} -``` - -## Analyze File-based observables - -**URL** -``` -POST /api/analyzer//run -``` - -`ANALYZER_ID` must be a valid analyzer `id`. - -**Input** - -This API call requires submitting the file to be analyzed by sending a request as a multipart format: - -- The first part must be named `data` and must contain the file. -- The second part must be named `_json` and must specify the observable details in JSON format, as described below: - -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| dataType | String | The observable's data type | -| tlp | Number | The observable's TLP: 0 for `WHITE`, 1 for `GREEN`, 2 for `AMBER`, 3 for `RED` | - -*Example* - -``` -curl -XPOST http://:/api/analyzer/File_Info_1_0/run \ - -F '_json={ - "dataType":"file", - "tlp":2 - };type=application/json' \ - -F 'attachment=@file.png;type=image/png' -``` - -This returns the details of the created analysis job. - -**Output** - -Returns a JSON object representing the started analysis job. 
- -| Attribute | Type | Description | -| ------------ | ------------- | ------------- | -| id | String | The job id | -| analyzerId | String| The analyzer's id | -| status | String | The job's status: `Success`, `InProgress` or `Failure` | -| date | Number | A timestamp which represents the job's start date | -| artifact | Object | The observable details | - -*Example* - -```json -{ - "id": "LOcqObDtJEOayPuV", - "analyzerId": "File_Info_1_0", - "status": "Success", - "date": 1490265356725, - "artifact": { - "attributes": { - "dataType": "file", - "tlp": 2, - "content-type": "image/png", - "filename": "file.png" - } - } -} -``` diff --git a/docs/api/wait-and-get-job-report.md b/docs/api/wait-and-get-job-report.md deleted file mode 100644 index a81e6ccd7..000000000 --- a/docs/api/wait-and-get-job-report.md +++ /dev/null @@ -1,27 +0,0 @@ -This API call is almost the same as [Get Job Report API](Get-job-report-API) but introduces an asynchronous behavior. It means that this API can wait for a given amount of time until the job completes. It also supports a timeout parameter. - -Instead of returning the details of an in-progress job, it will wait until it finishes then returns the report, provided it doesn't timeout. - -**URL** -``` -GET /api/job//waitreport?atMost= -``` - -`JOB_ID` must be a valid job `id`. -`DURATION` should be a valid duration, default to infinite. The duration format is a string composed by a number and a unit (based on [Scala durations](http://www.scala-lang.org/api/2.9.3/scala/concurrent/duration/Duration.html)), for example: -- 10seconds -- 1minute -- 10minutes -- 2hours - -If `atMost`query parameter is not specified, it defaults to *Infinite* (which can be a very, very long time considering the age of the Universe). - -**Output** - -Same output as [Get Job Report API](Get-job-report-API). 
- -**How to use it** - -``` -curl http://:/api/job//waitreport -``` \ No newline at end of file diff --git a/docs/images/cortex-report.png b/docs/images/cortex-report.png deleted file mode 100644 index d10e618e1..000000000 Binary files a/docs/images/cortex-report.png and /dev/null differ diff --git a/docs/images/long-report.png b/docs/images/long-report.png deleted file mode 100644 index 6a384959e..000000000 Binary files a/docs/images/long-report.png and /dev/null differ diff --git a/docs/images/short-report.png b/docs/images/short-report.png deleted file mode 100644 index 2a7eaf411..000000000 Binary files a/docs/images/short-report.png and /dev/null differ diff --git a/docs/installation/README.md b/docs/installation/README.md deleted file mode 100644 index e35be780a..000000000 --- a/docs/installation/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Installation guides - -TheHive can be installed using: -- [rpm package](rpm-guide.md) -- [deb package](deb-guide.md) -- [docker](docker-guide.md) -- [binary](binary-guide.md) -- [ansible script](https://github.com/drewstinnett/ansible-thehive) contributed by -[@drewstinnett](https://github.com/drewstinnett) -- [build from sources](build-guide.md) \ No newline at end of file diff --git a/docs/installation/analyzers.md b/docs/installation/analyzers.md deleted file mode 100644 index 59dc5c166..000000000 --- a/docs/installation/analyzers.md +++ /dev/null @@ -1,121 +0,0 @@ -# Cortex analyzers - -Analyzers are autonomous applications managed by and run through the Cortex core engine. Analyzers have their -[own dedicated GitHub repository](https://github.com/CERT-BDF/Cortex-Analyzers). They are included in the Cortex binary -package but you have to get them from the repository if you decide to build Cortex from sources or if you need to update -them. - -## Pre-requisites -Currently, all provided analyzers are written in Python. They don't require any build phase but their dependencies have -to be installed. 
Before proceeding, you'll need to install the system package dependencies that are required by some of -them: - -``` -apt-get install python-pip python2.7-dev ssdeep libfuzzy-dev libfuzzy2 libimage-exiftool-perl libmagic1 build-essential -``` - -Each analyzer comes with its own, pip compatible `requirements.txt` file. You can install all requirements with the -following commands: - -``` -cd analyzers -sudo pip install $(cat */requirements.txt | sort -u) -``` - -## From repository -If you want to get up-to-date analyzers, you can clone the GitHub repository: - -``` -git clone https://github.com/CERT-BDF/Cortex-Analyzers -``` - -Next, you'll need to tell Cortex where to find the analyzers. Currently, all the analyzers must be in the same -directory. Add the following to the Cortex configuration file (`application.conf`): - -``` -analyzer { - path = "path/to/analyzers" -} -``` -## Configuration - -Analyzers configuration is stored in Cortex configuration file (application.conf) in `analyzer.config` section. There is -one subsection for each analyzer group. The configuration provided to analyzer is the merge of: - - the global configuration: all item in `analyzer.config.global` section. This settings are applied for all analyzers. - It is particularly useful for proxy settings (cf. example below) - - the analyzer group configuration. Some analyzers shares configuration items, VirusTotal API key for all VirusTotal - analyzers for example. Group name can be found in JSON description file in analyzer folder, under `baseConfig` key. - - the analyzer configuration defined in JSON description file, under `config` key. - -Here is the complete configuration you should provide to make all analyzers work: - - -``` -analyzer { - path = "path/to/Cortex-Analyzers/analyzers" - config { - global { - proxy { - http="http://PROXY_ADDRESS:PORT", - https="http://PROXY_ADDRESS:PORT" - } - } - CIRCLPassiveDNS { - user= "..." - password= "..." - } - CIRCLPassiveSSL { - user= "..." 
- password= "..." - } - DNSDB { - server="https://api.dnsdb.info" - key="..." - } - DomainTools { - username="..." - key="..." - } - GoogleSafebrowsing { - key = "..." - } - Hippocampe { - url="..." - } - JoeSandbox { - url = "..." - apikey = "..." - } - Nessus { - url ="..." - login="..." - password="..." - policy="..." - ca_bundle="..." - allowed_network="..." - } - OTXQuery { - key="..." - } - PassiveTotal { - key="..." - username="..." - } - PhishingInitiative { - key="..." - } - PhishTank { - key="..." - } - Virusshare { - path = "..." - } - VirusTotal { - key="..." - } - Yara { - rules=["..."] - } - } -} -``` diff --git a/docs/installation/binary-guide.md b/docs/installation/binary-guide.md deleted file mode 100644 index 88a381a9c..000000000 --- a/docs/installation/binary-guide.md +++ /dev/null @@ -1,95 +0,0 @@ -# Installation Guide for Ubuntu 16.04 LTS - -This guide describes the manual installation of Cortex from binaries in Ubuntu 16.04. - -# 1. Minimal Ubuntu Installation - -Install a minimal Ubuntu 16.04 system with the following software: - * Java runtime environment 1.8+ (JRE) - -Make sure your system is up-to-date: - -``` -sudo apt-get update -sudo apt-get upgrade -``` - -# 2. Install a Java Virtual Machine -You can install either Oracle Java or OpenJDK. - -## 2.1. Oracle Java -``` -echo 'deb http://ppa.launchpad.net/webupd8team/java/ubuntu trusty main' | sudo tee -a /etc/apt/sources.list.d/java.list -sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-key EEA14886 -sudo apt-get update -sudo apt-get install oracle-java8-installer -``` - -## 2.2 OpenJDK -``` -sudo add-apt-repository ppa:openjdk-r/ppa -sudo apt-get update -sudo apt-get install openjdk-8-jre-headless - -``` - -# 3. Install Cortex - -Binary package can be downloaded at [thehive-cortex.zip](https://dl.bintray.com/cert-bdf/cortex/cortex-latest.zip) - -Download and unzip the chosen binary package. TheHive files can be installed wherever you want on the filesystem. 
In -this guide, we decided to set it in `/opt`. - -``` -cd /opt -wget https://dl.bintray.com/cert-bdf/cortex/cortex-latest.zip -unzip cortex-latest.zip -ln -s cortex-x.x.x cortex -``` - - -# 4. First start - -Change your current directory to Cortex installation directory (`/opt/cortex` in this guide), then execute: - -``` -bin/cortex -Dconfig.file=/etc/cortex/application.conf -``` - -It is recommended to use a dedicated non-privilege user to start Cortex. If so, make sure that your user can create log file in `/opt/cortex/logs` - -If you'd rather start the application as a service, do the following: -``` -sudo addgroup cortex -sudo adduser --system cortex -sudo cp /opt/cortex/package/cortex.service /usr/lib/systemd/system -sudo chown -R cortex:cortex /opt/cortex -sudo chgrp cortex /etc/cortex/application.conf -sudo chmod 640 /etc/cortex/application.conf -sudo systemctl enable cortex -sudo service cortex start -``` - -Please note that the service may take some time to start. - -Cortex comes with a simplistic frontend. Open your browser and connect to `http://YOUR_SERVER_ADDRESS:9000/` - -# 5. Plug analysers - -Now that Cortex starts successfully, downloads `Cortex-Analyzers` and edit the configuration file and set the path to -`Cortex-Analyzers/analyzers`. Follow details available in the [analyzers page](analyzers.md). - -## 6. Update - -To update Cortex from binaries, just stop the service, download the latest package, rebuild the link `/opt/cortex` and -restart the service. 
- -``` -service cortex stop -cd /opt -wget https://dl.bintray.com/cert-bdf/cortex/cortex-latest.zip -unzip cortex-latest.zip -rm /opt/cortex && ln -s cortex-x.x.x cortex -chown -R cortex:cortex /opt/cortex /opt/cortex-x.x.x -service cortex start -``` diff --git a/docs/installation/build-guide.md b/docs/installation/build-guide.md deleted file mode 100644 index adb7e0e26..000000000 --- a/docs/installation/build-guide.md +++ /dev/null @@ -1,144 +0,0 @@ -# Build from sources - -This document is a step-by-step guide to build Cortex from sources. - -## 1. Pre-requisites - -The following softwares are required to download and build Cortex. - -* Java Development Kit 8 (JDK) - * downloadable from http://www.oracle.com/technetwork/java/javase/downloads/index.html -* git - * Use the system package or downloadable it from http://www.git-scm.com/downloads -* NodeJs with its package manager (NPM) - * downloadable from https://nodejs.org/en/download/ -* Grunt - * After NodeJs installation, run `sudo npm install -g grunt-cli` -* Bower - * After NodeJs installation, run `sudo npm install -g bower` - - -# 2. Quick Build Guide - -To install the requirements and build Cortex from sources, please follow the instructions below depending on your -operating system. - -## 2.1. CentOS/RHEL - -### 2.1.1. Packages - -``` -sudo yum -y install git bzip2 -``` - -### 2.1.2. Installation of OpenJDK - -``` -sudo yum -y install java-1.8.0-openjdk-devel -``` - -### 2.1.3. Installation of NodeJs - -Install the EPEL Repository: - -You should have the "extras" repository enabled, then: -``` -sudo yum -y install epel-release -``` - -Then, you can install NodeJs: - -``` -sudo yum -y install nodejs -``` - -### 2.1.4. Installation of bower and grunt - -``` -sudo npm install -g bower grunt-cli -``` - -## 2.2. Ubuntu - -### 2.2.1. Packages - -``` -sudo apt-get install git wget -``` - -### 2.2.2. 
Installation of Oracle JDK - -``` -echo 'deb http://ppa.launchpad.net/webupd8team/java/ubuntu trusty main' | sudo tee -a /etc/apt/sources.list.d/java.list -sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-key EEA14886 -sudo apt-get update -sudo apt-get install oracle-java8-installer -``` - -### 2.2.3. Installation of NodeJs - -``` -sudo apt-get install wget -wget -qO- https://deb.nodesource.com/setup_4.x | sudo bash - -sudo apt-get install nodejs -``` - -### 2.2.4. Installation of bower and grunt - -``` -sudo npm install -g bower grunt-cli -``` - -## 2.3. Cortex - -### Download sources - -``` -git clone https://github.com/CERT-BDF/Cortex.git -``` - -### Build the projects - -``` -cd Cortex -bin/activator clean stage -``` - -It will download all dependencies (could be long) then build the back-end. -This command clean previous build files and create an autonomous package in `target/universal/stage` directory. This -packages contains Cortex binaries with required libraries (`/lib`), analyzers (`/analyzers`), configuration files -(`/conf`) and startup scripts (`/bin`). - -Binaries are built and stored in `Cortex/target/universal/stage/`. Install them in `/opt/cortex` for example. - -``` -sudo cp -r Cortex/target/universal/stage /opt/cortex -``` - -Follow the [configuration guide](../admin/configuration.md) to run Cortex. - - -### First start - -Follow [First start in the Installation guide](binary-guide.md#4-first-start) to start using Cortex. - - -## Build the front-end only -Building back-end builds also front-end, so you don't need to build front-end separately. This section is useful only -for troubleshooting or in order to install front-end in a reverse proxy. - -Go to front-end directory: -``` -cd Cortex/ui -``` - -Install NodeJs libraries (required by building step), bower libraries (javascript libraries downloaded by browser). 
Then -build the front-end : -``` -npm install -bower install -grunt build -``` - -This step generates static files (html, javascript and related resources) in `dist` directory. These files are ready to -be imported in http server. diff --git a/docs/installation/deb-guide.md b/docs/installation/deb-guide.md deleted file mode 100644 index 7fbecf1af..000000000 --- a/docs/installation/deb-guide.md +++ /dev/null @@ -1,13 +0,0 @@ -# Installation of TheHive using DEB package - -Debian packages are published on Bintray repository. All packages are signed using the key [562CBC1C](/PGP-PUBLIC-KEY) -(fingerprint: 0CD5 AC59 DE5C 5A8E 0EE1 3849 3D99 BB18 562C BC1C): - -``` -echo 'deb https://dl.bintray.com/cert-bdf/debian any main' | sudo tee -a /etc/apt/sources.list.d/thehive-project.list -sudo apt-key adv --keyserver hkp://pgp.mit.edu --recv-key 562CBC1C -sudo apt-get update -sudo apt-get install cortex -``` - -After package installation, you should configure Cortex (see [configuration guide](../admin/configuration.md)) \ No newline at end of file diff --git a/docs/installation/docker-guide.md b/docs/installation/docker-guide.md deleted file mode 100644 index 13696c543..000000000 --- a/docs/installation/docker-guide.md +++ /dev/null @@ -1,50 +0,0 @@ -# Install TheHive using docker - -This guide assume that you will use docker. - -## How to use this image - -Easiest way to start Cortex: -``` -docker run certbdf/cortex -``` - -From version 1.1.0, we don't provide the all-in-one docker (an image which contained TheHive and Cortex). If you want to -run TheHive and Cortex in docker, follow the -[TheHive docker guide](https://github.com/CERT-BDF/TheHive/blob/master/docs/installation/docker-guide.md). - -## Analyzers - -Analyzers are embedded in docker image in /opt/Cortex-Analyzers/analyzers. 
If you want to update then, you should -install them outside docker and overwrite existing ones: -``` -docker run --volume /path/to/analyzers:/opt/Cortex-Analyzers/analyzers:ro certbdf/cortex:latest -``` - -Most analyzers require configuration. You can inject configuration file using volume argument: -``` -docker run --volume /path/to/your/configuration:/etc/cortex/application.conf:ro certbdf/cortex:latest -``` - -You should also publish HTTP service to make Cortex available. This is done by adding publish parameter: -``` -docker run --publish 0.0.0.0:8080:9000 certbdf/cortex:latest -``` -This command exposes Cortex service on port 8080/tcp. - -## Customize Cortex docker - -By Default, Cortex docker add minimal configuration: - - choose a random secret (play.crypto.secret) - - configure analyzer path - -This behavious can be disabled by adding `--no-config` to docker command line: -`docker run certbdf/cortex:latest --no-config`. - -Docker image accepts more options: - - --no-config : do not try to configure Cortex (add secret and analyzers location) - - --no-config-secret : do not add random secret to configuration - - --secret : secret to secure sessions - - --analyzer-path : where analyzers are located - - diff --git a/docs/installation/rpm-guide.md b/docs/installation/rpm-guide.md deleted file mode 100644 index 8cc639d6c..000000000 --- a/docs/installation/rpm-guide.md +++ /dev/null @@ -1,18 +0,0 @@ -# Installation of TheHive using RPM package - -RPM packages are published on Bintray repository. All packages are signed using the key [562CBC1C](/PGP-PUBLIC-KEY) -(fingerprint: 0CD5 AC59 DE5C 5A8E 0EE1 3849 3D99 BB18 562C BC1C) - -First install rpm release package: -``` -yum install install https://dl.bintray.com/cert-bdf/rpm/thehive-project-release-1.0.0-3.noarch.rpm -``` -This will install TheHive Project repository (in /etc/yum.repos.d/thehive-rpm.repo) and the GPG public key (in -/etc/pki/rpm-gpg/GPG-TheHive-Project). 
- -Then you will able to install Cortex package using yum -``` -yum install cortex -``` - -After package installation, you should configure Cortex (see [configuration guide](../admin/configuration.md)) \ No newline at end of file diff --git a/package/docker/entrypoint b/package/docker/entrypoint index 08f2fd167..fd2e5a721 100755 --- a/package/docker/entrypoint +++ b/package/docker/entrypoint @@ -4,6 +4,7 @@ CONFIG_SECRET=1 CONFIG=1 CONFIG_FILE=/etc/cortex/application.conf ANALYZER_PATH=/opt/Cortex-Analyzers/analyzers +MISP_MODULE=1 function usage { cat <<- _EOF_ @@ -12,6 +13,7 @@ function usage { --no-config-secret | do not add random secret to configuration --secret | secret to secure sessions --analyzer-path | where analyzers are located + --no-misp-modules | disable MISP modules _EOF_ exit 1 } @@ -24,6 +26,7 @@ do "--no-config-secret") CONFIG_SECRET=0;; "--secret") shift; SECRET=$1;; "--analyzer-path") shift; ANALYZER_PATH=$1;; + "--no-misp-modules") shift; MISP_MODULE=0;; "--") STOP=1;; *) usage esac @@ -45,6 +48,11 @@ then echo analyzer.path=\"$ANALYZER_PATH\" >> $CONFIG_FILE + if test $MISP_MODULE = 1 + then + echo 'misp.modules.enabled = true' >> $CONFIG_FILE + fi + echo 'include file("/etc/cortex/application.conf")' >> $CONFIG_FILE fi diff --git a/package/etc_default_cortex b/package/etc_default_cortex new file mode 100644 index 000000000..b965c6d21 --- /dev/null +++ b/package/etc_default_cortex @@ -0,0 +1,17 @@ +# ##################################### +# ##### Environment Configuration ##### +# ##################################### + +# *WARNING* This file is not read if you are using systemd + +# This file gets sourced before the actual startscript +# gets executed.
You can use this file to provide +# environment variables + +# Define if Cortex service is enabled (no by default) +# ----------------- +ENABLED=no + +# Setting DAEMON_ARGS +# pidfile is disabled (/dev/null) as it is handled by the system loader (upstart/sysVinit) +#DAEMON_ARGS=-Dconfig.file=/etc/cortex/cortex.conf -Dlogger.file=/etc/cortex/logback.xml -Dpidfile.path=/dev/null diff --git a/ui/Gruntfile.js b/ui/Gruntfile.js index 62cddcbef..eeb160d96 100644 --- a/ui/Gruntfile.js +++ b/ui/Gruntfile.js @@ -248,7 +248,7 @@ module.exports = function(grunt) { src: [ '<%= yeoman.dist %>/scripts/{,*/}*.js', '<%= yeoman.dist %>/styles/{,*/}*.css', - '<%= yeoman.dist %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}', + // '<%= yeoman.dist %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}', '<%= yeoman.dist %>/styles/fonts/*' ] } @@ -404,7 +404,7 @@ module.exports = function(grunt) { 'images/{,*/}*.{webp}', 'styles/fonts/{,*/}*.*' ] - }, { + },{ expand: true, cwd: '.tmp/images', dest: '<%= yeoman.dist %>/images', @@ -427,6 +427,12 @@ module.exports = function(grunt) { cwd: '<%= yeoman.app %>/styles', dest: '.tmp/styles/', src: '{,*/}*.css' + }, + images: { + expand: true, + cwd: '<%= yeoman.app %>/images', + dest: '<%= yeoman.dist %>/images', + src: '{,*/}*.{png,jpg,jpeg,gif,svg}' } }, @@ -439,7 +445,8 @@ module.exports = function(grunt) { 'copy:styles' ], dist: [ - 'copy:styles'/*, + 'copy:styles', + 'copy:images'/*, 'imagemin', 'svgmin'*/ ] diff --git a/ui/app/apple-touch-icon.png b/ui/app/apple-touch-icon.png deleted file mode 100644 index cfd4700dc..000000000 Binary files a/ui/app/apple-touch-icon.png and /dev/null differ diff --git a/ui/app/favicon.png b/ui/app/favicon.png deleted file mode 100644 index cfd4700dc..000000000 Binary files a/ui/app/favicon.png and /dev/null differ diff --git a/ui/app/images/brain-black.png b/ui/app/images/brain-black.png deleted file mode 100644 index cfd4700dc..000000000 Binary files a/ui/app/images/brain-black.png and /dev/null differ diff --git
a/ui/app/images/brain-white.png b/ui/app/images/brain-white.png deleted file mode 100644 index 42cb34f7b..000000000 Binary files a/ui/app/images/brain-white.png and /dev/null differ diff --git a/ui/bower.json b/ui/bower.json index 3fdb8d93c..b395db1e0 100644 --- a/ui/bower.json +++ b/ui/bower.json @@ -1,6 +1,6 @@ { "name": "cortex", - "version": "1.0.2", + "version": "1.1.1", "dependencies": { "angular": "1.5.10", "angular-sanitize": "1.5.10", diff --git a/ui/package.json b/ui/package.json index 62340c891..f94ad7faa 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "cortex", - "version": "1.0.2", + "version": "1.1.1", "license": "AGPL-3.0", "repository": { "type": "git", diff --git a/version.sbt b/version.sbt index 48af491ec..61ca1d82e 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "1.1.0" +version in ThisBuild := "1.1.1"