From 345d841679160156c12a8d855214e5ac57739d64 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 5 Jun 2023 13:59:31 +0530 Subject: [PATCH 01/12] Update test.yml Test compatibility with JDK 1.17 --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 84ca35c8..b9d6148f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,7 @@ jobs: os: - ubuntu-latest java-version: - - 1.8 + - 1.17 splunk-version: - "8.2" - "latest" From 623ef470ac677628351c2c04cfd616f689e3122c Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 5 Jun 2023 14:03:00 +0530 Subject: [PATCH 02/12] Update test.yml --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b9d6148f..66965b0e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -39,7 +39,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v1 with: - java-version: 1.8 + java-version: 1.17 - name: Cache local Maven repository uses: actions/cache@v2 From 47ed8567f9b45f5c980d3105337d694834786f3f Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 5 Jun 2023 14:39:04 +0530 Subject: [PATCH 03/12] Update test.yml --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 66965b0e..fe8311e8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,7 @@ jobs: os: - ubuntu-latest java-version: - - 1.17 + - 17 splunk-version: - "8.2" - "latest" @@ -39,7 +39,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v1 with: - java-version: 1.17 + java-version: 17 - name: Cache local Maven repository uses: actions/cache@v2 From 70b8a0691184ec4d15e77973f28a6cdf609d323b Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Thu, 3 Aug 2023 16:49:26 +0530 Subject: [PATCH 04/12] 
JDK 17 upgrade changes - InstanceOf operator upgraded - use of diamond operator - try with resources --- pom.xml | 4 +- splunk/src/main/java/com/splunk/Args.java | 370 +-- splunk/src/main/java/com/splunk/AtomFeed.java | 230 +- .../src/main/java/com/splunk/AtomObject.java | 496 ++-- splunk/src/main/java/com/splunk/Command.java | 482 ++-- .../src/main/java/com/splunk/DataModel.java | 604 ++--- .../java/com/splunk/DataModelCalculation.java | 331 ++- .../main/java/com/splunk/DataModelObject.java | 788 +++--- splunk/src/main/java/com/splunk/Entity.java | 986 ++++---- splunk/src/main/java/com/splunk/Event.java | 428 ++-- .../src/main/java/com/splunk/FieldType.java | 142 +- .../main/java/com/splunk/FiredAlertGroup.java | 86 +- .../src/main/java/com/splunk/HttpService.java | 12 +- splunk/src/main/java/com/splunk/Index.java | 2230 ++++++++--------- .../main/java/com/splunk/InputCollection.java | 814 +++--- .../src/main/java/com/splunk/LicensePool.java | 346 +-- .../java/com/splunk/ModularInputKind.java | 222 +- .../java/com/splunk/PasswordCollection.java | 304 +-- .../java/com/splunk/PivotSpecification.java | 982 ++++---- .../main/java/com/splunk/RequestMessage.java | 228 +- .../java/com/splunk/ResourceCollection.java | 772 +++--- .../main/java/com/splunk/ResponseMessage.java | 160 +- .../java/com/splunk/ResultsReaderJson.java | 2 +- .../java/com/splunk/ResultsReaderXml.java | 4 +- splunk/src/main/java/com/splunk/Service.java | 60 +- splunk/src/main/java/com/splunk/Settings.java | 578 ++--- .../java/com/splunk/SimpleCookieStore.java | 186 +- splunk/src/main/java/com/splunk/TcpInput.java | 638 +++-- .../splunk/modularinput/InputDefinition.java | 449 ++-- .../modularinput/MultiValueParameter.java | 165 +- .../com/splunk/modularinput/Parameter.java | 196 +- .../java/com/splunk/modularinput/Scheme.java | 488 ++-- .../modularinput/SingleValueParameter.java | 241 +- .../modularinput/ValidationDefinition.java | 477 ++-- 34 files changed, 7235 insertions(+), 7266 deletions(-) mode 
change 100755 => 100644 splunk/src/main/java/com/splunk/modularinput/InputDefinition.java mode change 100755 => 100644 splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java mode change 100755 => 100644 splunk/src/main/java/com/splunk/modularinput/Parameter.java mode change 100755 => 100644 splunk/src/main/java/com/splunk/modularinput/Scheme.java mode change 100755 => 100644 splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java mode change 100755 => 100644 splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java diff --git a/pom.xml b/pom.xml index 920736a6..02518662 100644 --- a/pom.xml +++ b/pom.xml @@ -9,8 +9,8 @@ 1.9.4 true UTF-8 - 8 - 8 + 17 + 17 com.splunk diff --git a/splunk/src/main/java/com/splunk/Args.java b/splunk/src/main/java/com/splunk/Args.java index 4e93f36a..84706ed6 100644 --- a/splunk/src/main/java/com/splunk/Args.java +++ b/splunk/src/main/java/com/splunk/Args.java @@ -1,185 +1,185 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; - -/** - * The {@code Args} class is a helper class for working with Splunk REST API - * arguments. 
- * - * This extension is used mainly for encoding arguments for UTF8 transmission - * to a Splunk instance in a key=value pairing for a string, or - * {@code key=value1&key=value2 } (and so on) for an array of strings. - */ -public class Args extends LinkedHashMap { - - /** - * Class constructor. - */ - public Args() { super(); } - - /** - * Class constructor. Initializes a single key-value pair. - * - * @param key The key name. - * @param value The value, as a {@code String:String} or - * {@code String:String[]}. - */ - public Args(String key, Object value) { - super(); - put(key, value); - } - - /** - * Class constructor. Initializes a pre-existing hash map. - * - * @param values A set of key-value pairs. - */ - public Args(Map values) { - super(values); - } - - /** - * Adds an argument to an {@code Args} object. - * - * @param key The key name. - * @param value The value, as a {@code String:String} or - * {@code String:String[]}. - * @return This {@code Args} set. - */ - public Args add(String key, Object value) { - put(key, value); - return this; - } - - /** - * Creates a new empty instance of {@code Args}. - * - * @return The {@code Args} instance. - */ - public static Args create() { - return new Args(); - } - - /** - * Creates a new {@code Args} instance and initializes it with a single - * key-value pair. - * - * @param key The key name. - * @param value The value, as a {@code String:String} or - * {@code String:String[]}. - * @return The {@code Args} instance. - */ - public static Args create(String key, Object value) { - return new Args(key, value); - } - - /** - * Creates a new {@code Args} instance and initializes it with a - * pre-existing hash map. - * - * @param values The pre-existing hash map. - * @return The {@code Args} instance. - */ - public static Args create(Map values) { - return values == null ? new Args() : new Args(values); - } - - /** - * Encodes a single string with UTF8 encoding. - * - * @param value The string. 
- * @return The encoded string. - */ - public static String encode(String value) { - if (value == null) return ""; - String result = null; - try { - result = URLEncoder.encode(value, "UTF-8"); - } - catch (UnsupportedEncodingException e) { assert false; } - return result; - } - - /** - * Encodes a hash map of {@code String:String} or {@code String:String[]} - * into a single UTF8-encoded string. - * - * @param args The hash map. - * @return The string. - */ - public static String encode(Map args) { - return Args.create(args).encode(); - } - - // Encodes an argument with a list-valued argument. - private void - encodeValues(StringBuilder builder, String key, String[] values) { - key = encode(key); - for (String value : values) { - if (builder.length() > 0) builder.append('&'); - builder.append(key); - builder.append('='); - builder.append(encode(value)); - } - } - - /** - * Encodes an {@code Args} instance into a UTF8-encoded string. - * - * @return The UTF8-encoded string. - */ - public String encode() { - StringBuilder builder = new StringBuilder(); - for (Entry entry : entrySet()) { - if (builder.length() > 0) builder.append('&'); - String key = entry.getKey(); - Object value = entry.getValue(); - if (value instanceof String[]) { - encodeValues(builder, key, (String[])value); - } - else { - builder.append(encode(key)); - builder.append('='); - builder.append(encode(value.toString())); - } - } - return builder.toString(); - } - - /** - * Returns the hash-map value of a specific key, or the default value if - * the key is not found. - * - * @param args The hash map. - * @param key The key to look for. - * @param defaultValue The default value, if the key is not found. - * @param The class type. - * @return The value. - */ - public static T - get(Map args, String key, T defaultValue) { - if (!args.containsKey(key)) return defaultValue; - return (T)args.get(key); - } -} - +/* + * Copyright 2012 Splunk, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; + +/** + * The {@code Args} class is a helper class for working with Splunk REST API + * arguments. + * + * This extension is used mainly for encoding arguments for UTF8 transmission + * to a Splunk instance in a key=value pairing for a string, or + * {@code key=value1&key=value2 } (and so on) for an array of strings. + */ +public class Args extends LinkedHashMap { + + /** + * Class constructor. + */ + public Args() { super(); } + + /** + * Class constructor. Initializes a single key-value pair. + * + * @param key The key name. + * @param value The value, as a {@code String:String} or + * {@code String:String[]}. + */ + public Args(String key, Object value) { + super(); + put(key, value); + } + + /** + * Class constructor. Initializes a pre-existing hash map. + * + * @param values A set of key-value pairs. + */ + public Args(Map values) { + super(values); + } + + /** + * Adds an argument to an {@code Args} object. + * + * @param key The key name. + * @param value The value, as a {@code String:String} or + * {@code String:String[]}. + * @return This {@code Args} set. + */ + public Args add(String key, Object value) { + put(key, value); + return this; + } + + /** + * Creates a new empty instance of {@code Args}. 
+ * + * @return The {@code Args} instance. + */ + public static Args create() { + return new Args(); + } + + /** + * Creates a new {@code Args} instance and initializes it with a single + * key-value pair. + * + * @param key The key name. + * @param value The value, as a {@code String:String} or + * {@code String:String[]}. + * @return The {@code Args} instance. + */ + public static Args create(String key, Object value) { + return new Args(key, value); + } + + /** + * Creates a new {@code Args} instance and initializes it with a + * pre-existing hash map. + * + * @param values The pre-existing hash map. + * @return The {@code Args} instance. + */ + public static Args create(Map values) { + return values == null ? new Args() : new Args(values); + } + + /** + * Encodes a single string with UTF8 encoding. + * + * @param value The string. + * @return The encoded string. + */ + public static String encode(String value) { + if (value == null) return ""; + String result = null; + try { + result = URLEncoder.encode(value, "UTF-8"); + } + catch (UnsupportedEncodingException e) { assert false; } + return result; + } + + /** + * Encodes a hash map of {@code String:String} or {@code String:String[]} + * into a single UTF8-encoded string. + * + * @param args The hash map. + * @return The string. + */ + public static String encode(Map args) { + return Args.create(args).encode(); + } + + // Encodes an argument with a list-valued argument. + private void + encodeValues(StringBuilder builder, String key, String[] values) { + key = encode(key); + for (String value : values) { + if (builder.length() > 0) builder.append('&'); + builder.append(key); + builder.append('='); + builder.append(encode(value)); + } + } + + /** + * Encodes an {@code Args} instance into a UTF8-encoded string. + * + * @return The UTF8-encoded string. 
+ */ + public String encode() { + StringBuilder builder = new StringBuilder(); + for (Entry entry : entrySet()) { + if (builder.length() > 0) builder.append('&'); + String key = entry.getKey(); + Object value = entry.getValue(); + if (value instanceof String[] valueInst) { + encodeValues(builder, key, valueInst); + } + else { + builder.append(encode(key)); + builder.append('='); + builder.append(encode(value.toString())); + } + } + return builder.toString(); + } + + /** + * Returns the hash-map value of a specific key, or the default value if + * the key is not found. + * + * @param args The hash map. + * @param key The key to look for. + * @param defaultValue The default value, if the key is not found. + * @param The class type. + * @return The value. + */ + public static T + get(Map args, String key, T defaultValue) { + if (!args.containsKey(key)) return defaultValue; + return (T)args.get(key); + } +} + diff --git a/splunk/src/main/java/com/splunk/AtomFeed.java b/splunk/src/main/java/com/splunk/AtomFeed.java index 27915701..81554db5 100644 --- a/splunk/src/main/java/com/splunk/AtomFeed.java +++ b/splunk/src/main/java/com/splunk/AtomFeed.java @@ -1,115 +1,115 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.io.InputStream; -import java.util.*; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.XMLStreamReader; -import javax.xml.stream.XMLStreamConstants; - -/** - * The {@code AtomFeed} class represents an Atom feed. - */ -public class AtomFeed extends AtomObject { - /** The list of Atom entries contained in this {@code AtomFeed} object. */ - public ArrayList entries = new ArrayList(); - - /** The value of the Atom feed's {@code } element. */ - public String itemsPerPage = null; - - /** The value of the Atom feed's {@code } element. */ - public String startIndex = null; - - /** The value of the Atom feed's {@code } element. */ - public String totalResults = null; - - /** - * Creates a new {@code AtomFeed} instance. - * - * @return A new {@code AtomFeed} instance. - */ - static AtomFeed create() { - return new AtomFeed(); - } - - /** - * Creates a new {@code AtomFeed} instance based on the given stream. - * - * @param input The input stream. - * @return An {@code AtomFeed} instance representing the parsed stream. - */ - public static AtomFeed parseStream(InputStream input) { - XMLStreamReader reader = createReader(input); - - AtomFeed result = AtomFeed.parse(reader); - - try { - reader.close(); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - - return result; - } - - /** - * Creates a new {@code AtomFeed} instance based on a given XML element. - * - * @param input The XML stream. - * @return An {@code AtomFeed} instance representing the parsed element. - * @throws RuntimeException The runtime exception if a parse error occurs. - */ - static AtomFeed parse(XMLStreamReader input) { - AtomFeed feed = AtomFeed.create(); - feed.load(input, "feed"); - return feed; - } - - /** - * Initializes the current instance from a given XML element. - * - * @param reader The XML reader. 
- */ - @Override void init(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - if (name.equals("entry")) { - AtomEntry entry = AtomEntry.parse(reader); - this.entries.add(entry); - } - else if (name.equals("messages")) { - parseEnd(reader); - } - else if (name.equals("totalResults")) { - this.totalResults = parseText(reader); - } - else if (name.equals("itemsPerPage")) { - this.itemsPerPage = parseText(reader); - } - else if (name.equals("startIndex")) { - this.startIndex = parseText(reader); - } - else { - super.init(reader); - } - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.InputStream; +import java.util.*; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import javax.xml.stream.XMLStreamConstants; + +/** + * The {@code AtomFeed} class represents an Atom feed. + */ +public class AtomFeed extends AtomObject { + /** The list of Atom entries contained in this {@code AtomFeed} object. */ + public ArrayList entries = new ArrayList<>(); + + /** The value of the Atom feed's {@code } element. */ + public String itemsPerPage = null; + + /** The value of the Atom feed's {@code } element. */ + public String startIndex = null; + + /** The value of the Atom feed's {@code } element. */ + public String totalResults = null; + + /** + * Creates a new {@code AtomFeed} instance. 
+ * + * @return A new {@code AtomFeed} instance. + */ + static AtomFeed create() { + return new AtomFeed(); + } + + /** + * Creates a new {@code AtomFeed} instance based on the given stream. + * + * @param input The input stream. + * @return An {@code AtomFeed} instance representing the parsed stream. + */ + public static AtomFeed parseStream(InputStream input) { + XMLStreamReader reader = createReader(input); + + AtomFeed result = AtomFeed.parse(reader); + + try { + reader.close(); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + + return result; + } + + /** + * Creates a new {@code AtomFeed} instance based on a given XML element. + * + * @param input The XML stream. + * @return An {@code AtomFeed} instance representing the parsed element. + * @throws RuntimeException The runtime exception if a parse error occurs. + */ + static AtomFeed parse(XMLStreamReader input) { + AtomFeed feed = AtomFeed.create(); + feed.load(input, "feed"); + return feed; + } + + /** + * Initializes the current instance from a given XML element. + * + * @param reader The XML reader. 
+ */ + @Override void init(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + if (name.equals("entry")) { + AtomEntry entry = AtomEntry.parse(reader); + this.entries.add(entry); + } + else if (name.equals("messages")) { + parseEnd(reader); + } + else if (name.equals("totalResults")) { + this.totalResults = parseText(reader); + } + else if (name.equals("itemsPerPage")) { + this.itemsPerPage = parseText(reader); + } + else if (name.equals("startIndex")) { + this.startIndex = parseText(reader); + } + else { + super.init(reader); + } + } +} + diff --git a/splunk/src/main/java/com/splunk/AtomObject.java b/splunk/src/main/java/com/splunk/AtomObject.java index 67567134..21ed8132 100644 --- a/splunk/src/main/java/com/splunk/AtomObject.java +++ b/splunk/src/main/java/com/splunk/AtomObject.java @@ -1,248 +1,248 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; -import javax.xml.stream.Location; -import javax.xml.stream.XMLInputFactory; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.XMLStreamReader; -import javax.xml.stream.XMLStreamConstants; - -/** - * The {@code AtomObject} class represents a generic Atom object. This class is - * a common base class shared by {@code AtomFeed} and {@code AtomEntry}. 
- */ -public class AtomObject { - /** The value of the Atom {@code } element. */ - public String id; - - /** The value of the {@code } elements in this {@code AtomObject}. */ - public Map links = new HashMap(); - - /** The value of the Atom {@code } element. */ - public String title; - - /** The value of the Atom {@code <updated>} element. */ - public String updated; - - /** - * Instantiates the XMLStreamReader, advances to the root element and - * validates the root document structure. This initialization code is shared - * by the {@code AtomFeed} and {@code AtomEntry} parsers. - * - * @param input The input stream. - * @return An {@code XMLStreamReader} initialized reader, advanced to the - * first element of the document. - */ - protected static XMLStreamReader createReader(InputStream input) { - XMLInputFactory factory = XMLInputFactory.newInstance(); - - // The Atom parser assumes that all adjacent text nodes are coalesced - factory.setProperty(XMLInputFactory.IS_COALESCING, true); - factory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, true); - - XMLStreamReader reader; - try { - reader = factory.createXMLStreamReader(input); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - - assert reader.getEventType() == XMLStreamConstants.START_DOCUMENT; - - // Scan ahead to first element - scanTag(reader); - - return reader; - } - - /** - * Initialize a property of the current instance based on the given XML - * element. - * - * @param reader The XML reader. 
- */ - void init(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - if (name.equals("id")) { - this.id = parseText(reader); - } - else if (name.equals("link")) { - String rel = reader.getAttributeValue(null, "rel"); - String href = reader.getAttributeValue(null, "href"); - this.links.put(rel, href); - parseEnd(reader); - } - else if (name.equals("title")) { - this.title = parseText(reader); - } - else if (name.equals("updated")) { - this.updated = parseText(reader); - } - else { - parseEnd(reader); // Ignore - } - } - - /** - * Initializes the current instance from the given XML element by calling - * the {@code init} method on each child of the XML element. - * - * @param reader The XML reader. - */ - void load(XMLStreamReader reader, String localName) { - assert isStartElement(reader, localName); - - String name = reader.getLocalName(); - - scan(reader); - while (reader.isStartElement()) { - init(reader); - } - - if (!isEndElement(reader, name)) - syntaxError(reader); - - scan(reader); // Consume the end element - } - - /** - * Parses the element at the current cursor position and reads the - * corresponding end element. - * - * @param reader The XML reader. - */ - protected void parseEnd(XMLStreamReader reader) { - scanEnd(reader); // Scan ahead to the end element - scan(reader); // Consume the end element - } - - /** - * Parses and returns the text value of the element at the current cursor - * position and reads the corresponding end element. - * - * @param reader The XML reader. - * @return The element's text value. 
- */ - protected String parseText(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - String value = getElementText(reader); - - if (!isEndElement(reader, name)) - syntaxError(reader); - - scan(reader); // Consume the end element - - return value; - } - - // - // Lexical helpers - // - - protected static String getElementText(XMLStreamReader reader) { - try { - return reader.getElementText(); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - protected static boolean - isEndElement(XMLStreamReader reader, String localName) { - return reader.isEndElement() - && reader.getLocalName().equals(localName); - } - - protected static boolean - isStartElement(XMLStreamReader reader, String localName) { - return reader.isStartElement() - && reader.getLocalName().equals(localName); - } - - // Scan ahead to the next token, skipping whitespace - protected static void scan(XMLStreamReader reader) { - assert !reader.isWhiteSpace(); // current should never be white - try { - do { - reader.next(); - } - while (reader.isWhiteSpace()); // Ignore whitespace - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - // Scan ahead to the end element that matches the current start element. - // Note: function returns cursor located at matching end element. - protected static void scanEnd(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - while (true) { - scan(reader); - - switch (reader.getEventType()) { - case XMLStreamConstants.CHARACTERS: - continue; - - case XMLStreamConstants.START_ELEMENT: - scanEnd(reader); - continue; - - case XMLStreamConstants.END_ELEMENT: - if (!reader.getLocalName().equals(name)) - syntaxError(reader); - return; - - default: - syntaxError(reader); - } - } - } - - // Scan ahead until the next start tag. 
- protected static void scanTag(XMLStreamReader reader) { - try { - reader.nextTag(); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - // Raises a Syntax error runtime exception - protected static void syntaxError(XMLStreamReader reader) { - Location location = reader.getLocation(); - String where = location.toString(); - String message = String.format("Syntax error @ %s", where); - throw new RuntimeException(message); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import javax.xml.stream.Location; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import javax.xml.stream.XMLStreamConstants; + +/** + * The {@code AtomObject} class represents a generic Atom object. This class is + * a common base class shared by {@code AtomFeed} and {@code AtomEntry}. + */ +public class AtomObject { + /** The value of the Atom {@code <id>} element. */ + public String id; + + /** The value of the {@code <link>} elements in this {@code AtomObject}. */ + public Map<String, String> links = new HashMap<>(); + + /** The value of the Atom {@code <title>} element. */ + public String title; + + /** The value of the Atom {@code <updated>} element. 
*/ + public String updated; + + /** + * Instantiates the XMLStreamReader, advances to the root element and + * validates the root document structure. This initialization code is shared + * by the {@code AtomFeed} and {@code AtomEntry} parsers. + * + * @param input The input stream. + * @return An {@code XMLStreamReader} initialized reader, advanced to the + * first element of the document. + */ + protected static XMLStreamReader createReader(InputStream input) { + XMLInputFactory factory = XMLInputFactory.newInstance(); + + // The Atom parser assumes that all adjacent text nodes are coalesced + factory.setProperty(XMLInputFactory.IS_COALESCING, true); + factory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, true); + + XMLStreamReader reader; + try { + reader = factory.createXMLStreamReader(input); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + + assert reader.getEventType() == XMLStreamConstants.START_DOCUMENT; + + // Scan ahead to first element + scanTag(reader); + + return reader; + } + + /** + * Initialize a property of the current instance based on the given XML + * element. + * + * @param reader The XML reader. + */ + void init(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + if (name.equals("id")) { + this.id = parseText(reader); + } + else if (name.equals("link")) { + String rel = reader.getAttributeValue(null, "rel"); + String href = reader.getAttributeValue(null, "href"); + this.links.put(rel, href); + parseEnd(reader); + } + else if (name.equals("title")) { + this.title = parseText(reader); + } + else if (name.equals("updated")) { + this.updated = parseText(reader); + } + else { + parseEnd(reader); // Ignore + } + } + + /** + * Initializes the current instance from the given XML element by calling + * the {@code init} method on each child of the XML element. + * + * @param reader The XML reader. 
+ */ + void load(XMLStreamReader reader, String localName) { + assert isStartElement(reader, localName); + + String name = reader.getLocalName(); + + scan(reader); + while (reader.isStartElement()) { + init(reader); + } + + if (!isEndElement(reader, name)) + syntaxError(reader); + + scan(reader); // Consume the end element + } + + /** + * Parses the element at the current cursor position and reads the + * corresponding end element. + * + * @param reader The XML reader. + */ + protected void parseEnd(XMLStreamReader reader) { + scanEnd(reader); // Scan ahead to the end element + scan(reader); // Consume the end element + } + + /** + * Parses and returns the text value of the element at the current cursor + * position and reads the corresponding end element. + * + * @param reader The XML reader. + * @return The element's text value. + */ + protected String parseText(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + String value = getElementText(reader); + + if (!isEndElement(reader, name)) + syntaxError(reader); + + scan(reader); // Consume the end element + + return value; + } + + // + // Lexical helpers + // + + protected static String getElementText(XMLStreamReader reader) { + try { + return reader.getElementText(); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + protected static boolean + isEndElement(XMLStreamReader reader, String localName) { + return reader.isEndElement() + && reader.getLocalName().equals(localName); + } + + protected static boolean + isStartElement(XMLStreamReader reader, String localName) { + return reader.isStartElement() + && reader.getLocalName().equals(localName); + } + + // Scan ahead to the next token, skipping whitespace + protected static void scan(XMLStreamReader reader) { + assert !reader.isWhiteSpace(); // current should never be white + try { + do { + reader.next(); + } + while (reader.isWhiteSpace()); // Ignore whitespace + } 
+ catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + // Scan ahead to the end element that matches the current start element. + // Note: function returns cursor located at matching end element. + protected static void scanEnd(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + while (true) { + scan(reader); + + switch (reader.getEventType()) { + case XMLStreamConstants.CHARACTERS: + continue; + + case XMLStreamConstants.START_ELEMENT: + scanEnd(reader); + continue; + + case XMLStreamConstants.END_ELEMENT: + if (!reader.getLocalName().equals(name)) + syntaxError(reader); + return; + + default: + syntaxError(reader); + } + } + } + + // Scan ahead until the next start tag. + protected static void scanTag(XMLStreamReader reader) { + try { + reader.nextTag(); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + // Raises a Syntax error runtime exception + protected static void syntaxError(XMLStreamReader reader) { + Location location = reader.getLocation(); + String where = location.toString(); + String message = String.format("Syntax error @ %s", where); + throw new RuntimeException(message); + } +} diff --git a/splunk/src/main/java/com/splunk/Command.java b/splunk/src/main/java/com/splunk/Command.java index 4dbd39b6..267e3b15 100644 --- a/splunk/src/main/java/com/splunk/Command.java +++ b/splunk/src/main/java/com/splunk/Command.java @@ -1,244 +1,238 @@ -/* - * Copyright 2011 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.cli.PosixParser; - -/** - * This class serves as an example and is unsupported. - * - * Processes and capture command options and arguments - */ -public class Command { - private String appName; - private Options rules = new Options(); - - // The parsed command line arguments - public String[] args = new String[0]; - - // The parsed command line options (flags) - public HashMap<String, Object> opts = new HashMap<String, Object>(); - - // Whether or not this is a help request - public Boolean help = false; - - public static final HashMap<String, Object> defaultValues = new HashMap<String, Object>(); - { - defaultValues.put("scheme", "https"); - defaultValues.put("host", "localhost"); - defaultValues.put("port", 8089); - } - - Command(String appName) { - this.appName = appName; - } - - public static Command create() { - return create(null); - } - - public static Command create(String appName) { - return new Command(appName); - } - - public static void error(String message, Object... 
args) { - System.err.format("Error: %s\n", String.format(message, args)); - System.exit(2); - } - - public Options getRules() { - return this.rules; - } - - // Initialize with default Splunk command options. - @SuppressWarnings("static-access") // OptionBuilder API requires this - public Command init() { - rules.addOption("h", "help", false, "Display this help message"); - rules.addOption(null, "host", true, "Host name (default localhost)"); - rules.addOption(OptionBuilder - .withLongOpt("port") - .hasArg(true) - .withType(Integer.class) - .create()); - rules.addOption(null, "scheme", true, "Scheme (default https)"); - rules.addOption(null, "username", true, "Username to login with"); - rules.addOption(null, "password", true, "Password to login with"); - rules.addOption(null, "app", true, "App/namespace context"); - rules.addOption(null, "owner", true, "Owner/user context"); - // This is here only for compatibility with the JavaScript SDK's .splunkrc. - rules.addOption(null, "version", true, "Version (irrelevant for Java)"); - return this; - } - - public Command addRule(String name, String description) { - rules.addOption(null, name, false, description); - return this; - } - - @SuppressWarnings("static-access") // OptionBuilder API requires this - public Command addRule(String name, Class argType, String description) { - rules.addOption( - OptionBuilder - .withLongOpt(name) - .hasArg(true) - .withType(argType) - .withDescription(description) - .create()); - return this; - } - - // Load a file of options and arguments - public Command load(String path) { - ArrayList<String> argList = new ArrayList<String>(); - - try { - FileReader fileReader = new FileReader(path); - try { - BufferedReader reader = new BufferedReader(fileReader); - while (true) { - String line; - line = reader.readLine(); - if (line == null) - break; - if (line.startsWith("#")) - continue; - line = line.trim(); - if (line.length() == 0) - continue; - if (!line.startsWith("-")) - line = "--" + line; 
- argList.add(line); - } - } - finally { - fileReader.close(); - } - } - catch (IOException e) { - error(e.getMessage()); - return this; - } - - parse(argList.toArray(new String[argList.size()])); - return this; - } - - // Parse the given argument vector - public Command parse(String[] argv) { - CommandLineParser parser = new PosixParser(); - - CommandLine cmdline = null; - try { - cmdline = parser.parse(this.rules, argv); - } - catch (ParseException e) { - error(e.getMessage()); - } - - // Unpack the cmdline into a simple Map of options and optionally - // assign values to any corresponding fields found in the Command class. - for (Option option : cmdline.getOptions()) { - String name = option.getLongOpt(); - Object value = option.getValue(); - - // Figure out the type of the option and convert the value. - if (!option.hasArg()) { - // If it has no arg, then its implicitly boolean and presence - // of the argument indicates truth. - value = true; - } - else { - Class type = (Class)option.getType(); - if (type == null) { - // Null implies String, no conversion necessary - } - else if (type == Integer.class) { - value = Integer.parseInt((String)value); - } - else { - assert false; // Unsupported type - } - } - - this.opts.put(name, value); - - // Look for a field of the Command class (or subclass) that - // matches the long name of the option and, if found, assign the - // corresponding option value in order to provide simplified - // access to command options. 
- try { - java.lang.reflect.Field field = this.getClass().getField(name); - field.set(this, value); - } - catch (NoSuchFieldException e) { continue; } - catch (IllegalAccessException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - String[] orig = this.args; - String[] more = cmdline.getArgs(); - this.args = new String[orig.length + more.length]; - System.arraycopy(orig, 0, this.args, 0, orig.length); - System.arraycopy(more, 0, this.args, orig.length, more.length); - - if (this.help) { - printHelp(); - System.exit(0); - } - - return this; - } - - public void printHelp() { - HelpFormatter formatter = new HelpFormatter(); - String appName = this.appName == null ? "App" : this.appName; - formatter.printHelp(appName, this.rules); - } - - public static Command splunk() { - return splunk(null); - } - - // Creates a command instance, initializes with the default Splunk - // command line rules and attempts to load the default options file. - public static Command splunk(String appName) { - return Command.create(appName).init().splunkrc(); - } - - // Load the default options file (.splunkrc) if it exists - public Command splunkrc() { - this.opts.putAll(defaultValues); - load(System.getProperty("user.home") + File.separator + ".splunkrc"); - return this; - } -} - +/* + * Copyright 2011 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.cli.PosixParser; + +/** + * This class serves as an example and is unsupported. + * + * Processes and capture command options and arguments + */ +public class Command { + private String appName; + private Options rules = new Options(); + + // The parsed command line arguments + public String[] args = new String[0]; + + // The parsed command line options (flags) + public HashMap<String, Object> opts = new HashMap<>(); + + // Whether or not this is a help request + public Boolean help = false; + + public static final HashMap<String, Object> defaultValues = new HashMap<>(); + { + defaultValues.put("scheme", "https"); + defaultValues.put("host", "localhost"); + defaultValues.put("port", 8089); + } + + Command(String appName) { + this.appName = appName; + } + + public static Command create() { + return create(null); + } + + public static Command create(String appName) { + return new Command(appName); + } + + public static void error(String message, Object... args) { + System.err.format("Error: %s\n", String.format(message, args)); + System.exit(2); + } + + public Options getRules() { + return this.rules; + } + + // Initialize with default Splunk command options. 
+ @SuppressWarnings("static-access") // OptionBuilder API requires this + public Command init() { + rules.addOption("h", "help", false, "Display this help message"); + rules.addOption(null, "host", true, "Host name (default localhost)"); + rules.addOption(OptionBuilder + .withLongOpt("port") + .hasArg(true) + .withType(Integer.class) + .create()); + rules.addOption(null, "scheme", true, "Scheme (default https)"); + rules.addOption(null, "username", true, "Username to login with"); + rules.addOption(null, "password", true, "Password to login with"); + rules.addOption(null, "app", true, "App/namespace context"); + rules.addOption(null, "owner", true, "Owner/user context"); + // This is here only for compatibility with the JavaScript SDK's .splunkrc. + rules.addOption(null, "version", true, "Version (irrelevant for Java)"); + return this; + } + + public Command addRule(String name, String description) { + rules.addOption(null, name, false, description); + return this; + } + + @SuppressWarnings("static-access") // OptionBuilder API requires this + public Command addRule(String name, Class argType, String description) { + rules.addOption( + OptionBuilder + .withLongOpt(name) + .hasArg(true) + .withType(argType) + .withDescription(description) + .create()); + return this; + } + + // Load a file of options and arguments + public Command load(String path) { + ArrayList<String> argList = new ArrayList<>(); + + try (FileReader fileReader = new FileReader(path); + BufferedReader reader = new BufferedReader(fileReader);) { + while (true) { + String line; + line = reader.readLine(); + if (line == null) + break; + if (line.startsWith("#")) + continue; + line = line.trim(); + if (line.length() == 0) + continue; + if (!line.startsWith("-")) + line = "--" + line; + argList.add(line); + } + } + catch (IOException e) { + error(e.getMessage()); + return this; + } + + parse(argList.toArray(new String[argList.size()])); + return this; + } + + // Parse the given argument vector + public 
Command parse(String[] argv) { + CommandLineParser parser = new PosixParser(); + + CommandLine cmdline = null; + try { + cmdline = parser.parse(this.rules, argv); + } + catch (ParseException e) { + error(e.getMessage()); + } + + // Unpack the cmdline into a simple Map of options and optionally + // assign values to any corresponding fields found in the Command class. + for (Option option : cmdline.getOptions()) { + String name = option.getLongOpt(); + Object value = option.getValue(); + + // Figure out the type of the option and convert the value. + if (!option.hasArg()) { + // If it has no arg, then its implicitly boolean and presence + // of the argument indicates truth. + value = true; + } + else { + Class type = (Class)option.getType(); + if (type == null) { + // Null implies String, no conversion necessary + } + else if (type == Integer.class) { + value = Integer.parseInt((String)value); + } + else { + assert false; // Unsupported type + } + } + + this.opts.put(name, value); + + // Look for a field of the Command class (or subclass) that + // matches the long name of the option and, if found, assign the + // corresponding option value in order to provide simplified + // access to command options. + try { + java.lang.reflect.Field field = this.getClass().getField(name); + field.set(this, value); + } + catch (NoSuchFieldException e) { continue; } + catch (IllegalAccessException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + String[] orig = this.args; + String[] more = cmdline.getArgs(); + this.args = new String[orig.length + more.length]; + System.arraycopy(orig, 0, this.args, 0, orig.length); + System.arraycopy(more, 0, this.args, orig.length, more.length); + + if (this.help) { + printHelp(); + System.exit(0); + } + + return this; + } + + public void printHelp() { + HelpFormatter formatter = new HelpFormatter(); + String appName = this.appName == null ? 
"App" : this.appName; + formatter.printHelp(appName, this.rules); + } + + public static Command splunk() { + return splunk(null); + } + + // Creates a command instance, initializes with the default Splunk + // command line rules and attempts to load the default options file. + public static Command splunk(String appName) { + return Command.create(appName).init().splunkrc(); + } + + // Load the default options file (.splunkrc) if it exists + public Command splunkrc() { + this.opts.putAll(defaultValues); + load(System.getProperty("user.home") + File.separator + ".splunkrc"); + return this; + } +} + diff --git a/splunk/src/main/java/com/splunk/DataModel.java b/splunk/src/main/java/com/splunk/DataModel.java index ce42f6f9..7711f0b4 100644 --- a/splunk/src/main/java/com/splunk/DataModel.java +++ b/splunk/src/main/java/com/splunk/DataModel.java @@ -1,302 +1,302 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import com.google.gson.*; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * DataModel represents a data model on the server. Data models contain - * data model objects, which specify structured views on Splunk data. 
- */ -public class DataModel extends Entity { - private final static JsonParser jsonParser = new JsonParser(); - private final static Gson gson = new Gson(); - - private static final String ACCELERATION_LABEL = "acceleration"; - private static final String MODEL_NAME_LABEL = "modelName"; - private static final String DISPLAY_NAME_LABEL = "displayName"; - private static final String DESCRIPTION_LABEL = "description"; - private static final String RAW_JSON_LABEL = "description"; // Yes, this is insane. - - // Human readable description, as opposed to the raw JSON, which is also called 'description' - private String description; - - private Map<String, DataModelObject> objects; - private boolean accelerationEnabled; - private String earliestAcceleratedTime; - private String accelerationCronSchedule; - private boolean manualRebuilds; - - DataModel(Service service, String path) { - super(service, path); - // The data provided by the collection is incomplete. Go ahead and refresh so we don't - // have to worry about it. - this.refresh(); - } - - /** - * Returns whether there is an object of the given name in this data model. - * - * @param name Name of the object to check for. - * @return true if there is an object with that name; false otherwise. - */ - public boolean containsObject(String name) { - return this.objects.containsKey(name); - } - - /** - * Retrieve an object by name from this data model. - * - * @param name Name of the object to retrieve. - * @return a DataModelObject if there is such an object; null otherwise. - */ - public DataModelObject getObject(String name) { - return this.objects.get(name); - } - - /** - * @return a collection of all objects in this data model. - */ - public Collection<DataModelObject> getObjects() { - return Collections.unmodifiableCollection(objects.values()); - } - - /** - * Returns the tsidx namespace which holds global acceleration events for this - * data model. 
The namespace will be returned whether acceleration is enabled - * or not. - * - * @return The tsidx namespace for global acceleration of this data model. - */ - public String getAccelerationNamespace() { - // For the moment, the acceleration namespace for global acceleration of - // data models is the name of the data model. - return getName(); - } - - /** - * @return whether global acceleration is enabled for this data model. - */ - public boolean isAccelerated() { - return this.accelerationEnabled; - } - - /** - * @return A human readable description of this data model. - */ - public String getDescription() { - return this.description; - } - - /** - * @return The raw JSON describing this data model and its objects. - */ - public String getRawJson() { - return getString(RAW_JSON_LABEL); - } - - /** - * @return the human readable name of this data model. - */ - public String getDisplayName() { - return getString(DISPLAY_NAME_LABEL); - } - - @Override - Entity load(AtomObject value) { - Entity result = super.load(value); - // After loading the Atom entity as we would for any other Splunk entity, - // we have to parse the JSON description of the data model and its acceleration - // status. - parseDescription(getString(RAW_JSON_LABEL)); - parseAcceleration(getString(ACCELERATION_LABEL)); - return result; - } - - /** - * Parse the JSON returned from splunkd describing this data model. - * - * This method writes the results into fields of this object. - * - * @param input a String containing JSON. 
- */ - private void parseDescription(String input) { - objects = new HashMap<String, DataModelObject>(); - - JsonElement rootElement = jsonParser.parse(input); - - for (Entry<String, JsonElement> entry : rootElement.getAsJsonObject().entrySet()) { - if (entry.getKey().equals(MODEL_NAME_LABEL)) { - content.put(MODEL_NAME_LABEL, entry.getValue().getAsString()); - } else if (entry.getKey().equals(DISPLAY_NAME_LABEL)) { - content.put(DISPLAY_NAME_LABEL, entry.getValue().getAsString()); - } else if (entry.getKey().equals(DESCRIPTION_LABEL)) { - description = entry.getValue().getAsString(); - } else if (entry.getKey().equals("objects")) { - JsonArray objectArray = entry.getValue().getAsJsonArray(); - for (JsonElement object : objectArray) { - DataModelObject dmo = DataModelObject.parse(this, object); - objects.put(dmo.getName(), dmo); - } - } else { - // Allow new keys without complaining - } - } - } - - /** - * Parse the acceleration description from splunkd of this data model. - * - * This method writes the results into fields of this object. - * - * @param input a string containing JSON. - */ - private void parseAcceleration(String input) { - JsonElement rootElement = jsonParser.parse(input); - - for (Entry<String, JsonElement> entry : rootElement. getAsJsonObject().entrySet()) { - if (entry.getKey().equals("enabled")) { - // API is broken in 6.1. It returns 1 instead of true (but does return false). 
- if (((JsonPrimitive)entry.getValue()).isBoolean()) { - accelerationEnabled = entry.getValue().getAsBoolean(); - } else if (((JsonPrimitive)entry.getValue()).isNumber()) { - accelerationEnabled = entry.getValue().getAsInt() != 0; - } else { - throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + - " for whether acceleration is enabled."); - } - } else if (entry.getKey().equals("earliest_time")) { - earliestAcceleratedTime = entry.getValue().getAsString(); - } else if (entry.getKey().equals("cron_schedule")) { - accelerationCronSchedule = entry.getValue().getAsString(); - } else if (entry.getKey().equals("manual_rebuilds")) { - if (((JsonPrimitive)entry.getValue()).isBoolean()) { - manualRebuilds = entry.getValue().getAsBoolean(); - } else if (((JsonPrimitive)entry.getValue()).isNumber()) { - manualRebuilds = entry.getValue().getAsInt() != 0; - } else { - throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + - " for whether manual_rebuilds is enabled."); - } - } else { - // Allow new keys without complaining - } - } - } - - /** - * Enable or disable global acceleration on this data model. - * - * @param enabled true enabled, false disables. - */ - public void setAcceleration(boolean enabled) { - this.accelerationEnabled = enabled; - toUpdate.put("enabled", enabled); - } - - /** - * Return the earliest time of the window over which the data model is accelerated. - * - * Times are represented relative to now, given by a minus sign, a number, and a - * suffix indicating the time unit (e.g., "-2mon", "-1day"). - * - * @return a string representing the earliest accelerated time. - */ - public String getEarliestAcceleratedTime() { - return earliestAcceleratedTime; - } - - /** - * Set the size of the window (from the specified earliest time to now) over - * which the data model should be accelerated. 
- * - * Times are represented relative to now, given by a minus sign, a number, and a - * suffix indicating the time unit (e.g., "-2mon", "-1day"). - * - * @param earliestAcceleratedTime a string specifying a time. - */ - public void setEarliestAcceleratedTime(String earliestAcceleratedTime) { - this.earliestAcceleratedTime = earliestAcceleratedTime; - toUpdate.put("earliest_time", earliestAcceleratedTime); - } - - /** - * Return the cron schedule on which the cached data for acceleration should be - * updated. - * - * @return a string containing a crontab style schedule specification. - */ - public String getAccelerationCronSchedule() { - return accelerationCronSchedule; - } - - /** - * Set the cron schedule on which the cached data for the acceleration should - * be updated. - * - * @param accelerationCronSchedule a crontab style schedule to use. - */ - public void setAccelerationCronSchedule(String accelerationCronSchedule) { - this.accelerationCronSchedule = accelerationCronSchedule; - toUpdate.put("cron_schedule", accelerationCronSchedule); - } - - /** - * This setting prevents outdated summaries from being rebuilt by the - * 'summarize' command. - * - * @return whether manual rebuilds are enabled for this data model. - */ - public boolean isManualRebuilds() { - return this.manualRebuilds; - } - - /** - * Enable or disable manual rebuilds on this data model. - * - * @param enabled true enabled, false disables. - */ - public void setManualRebuilds(boolean enabled) { - this.manualRebuilds = enabled; - toUpdate.put("manual_rebuilds", enabled); - } - - @Override - public void update() { - // We have to do some munging on the acceleration fields to pass them as JSON - // to the server. 
- Map<String, Object> accelerationMap = new HashMap<String, Object>(); - for (String key : new String[] {"enabled", "earliest_time", "cron_schedule", "manual_rebuilds"}) { - if (toUpdate.containsKey(key)) { - accelerationMap.put(key, toUpdate.get(key)); - toUpdate.remove(key); - } - } - - if (!accelerationMap.isEmpty()) { - toUpdate.put("acceleration", gson.toJson(accelerationMap)); - } - - // Now update like we would any other entity. - super.update(); - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import com.google.gson.*; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * DataModel represents a data model on the server. Data models contain + * data model objects, which specify structured views on Splunk data. + */ +public class DataModel extends Entity { + private final static JsonParser jsonParser = new JsonParser(); + private final static Gson gson = new Gson(); + + private static final String ACCELERATION_LABEL = "acceleration"; + private static final String MODEL_NAME_LABEL = "modelName"; + private static final String DISPLAY_NAME_LABEL = "displayName"; + private static final String DESCRIPTION_LABEL = "description"; + private static final String RAW_JSON_LABEL = "description"; // Yes, this is insane. 
+ + // Human readable description, as opposed to the raw JSON, which is also called 'description' + private String description; + + private Map<String, DataModelObject> objects; + private boolean accelerationEnabled; + private String earliestAcceleratedTime; + private String accelerationCronSchedule; + private boolean manualRebuilds; + + DataModel(Service service, String path) { + super(service, path); + // The data provided by the collection is incomplete. Go ahead and refresh so we don't + // have to worry about it. + this.refresh(); + } + + /** + * Returns whether there is an object of the given name in this data model. + * + * @param name Name of the object to check for. + * @return true if there is an object with that name; false otherwise. + */ + public boolean containsObject(String name) { + return this.objects.containsKey(name); + } + + /** + * Retrieve an object by name from this data model. + * + * @param name Name of the object to retrieve. + * @return a DataModelObject if there is such an object; null otherwise. + */ + public DataModelObject getObject(String name) { + return this.objects.get(name); + } + + /** + * @return a collection of all objects in this data model. + */ + public Collection<DataModelObject> getObjects() { + return Collections.unmodifiableCollection(objects.values()); + } + + /** + * Returns the tsidx namespace which holds global acceleration events for this + * data model. The namespace will be returned whether acceleration is enabled + * or not. + * + * @return The tsidx namespace for global acceleration of this data model. + */ + public String getAccelerationNamespace() { + // For the moment, the acceleration namespace for global acceleration of + // data models is the name of the data model. + return getName(); + } + + /** + * @return whether global acceleration is enabled for this data model. 
+ */ + public boolean isAccelerated() { + return this.accelerationEnabled; + } + + /** + * @return A human readable description of this data model. + */ + public String getDescription() { + return this.description; + } + + /** + * @return The raw JSON describing this data model and its objects. + */ + public String getRawJson() { + return getString(RAW_JSON_LABEL); + } + + /** + * @return the human readable name of this data model. + */ + public String getDisplayName() { + return getString(DISPLAY_NAME_LABEL); + } + + @Override + Entity load(AtomObject value) { + Entity result = super.load(value); + // After loading the Atom entity as we would for any other Splunk entity, + // we have to parse the JSON description of the data model and its acceleration + // status. + parseDescription(getString(RAW_JSON_LABEL)); + parseAcceleration(getString(ACCELERATION_LABEL)); + return result; + } + + /** + * Parse the JSON returned from splunkd describing this data model. + * + * This method writes the results into fields of this object. + * + * @param input a String containing JSON. 
+ */ + private void parseDescription(String input) { + objects = new HashMap<>(); + + JsonElement rootElement = jsonParser.parse(input); + + for (Entry<String, JsonElement> entry : rootElement.getAsJsonObject().entrySet()) { + if (entry.getKey().equals(MODEL_NAME_LABEL)) { + content.put(MODEL_NAME_LABEL, entry.getValue().getAsString()); + } else if (entry.getKey().equals(DISPLAY_NAME_LABEL)) { + content.put(DISPLAY_NAME_LABEL, entry.getValue().getAsString()); + } else if (entry.getKey().equals(DESCRIPTION_LABEL)) { + description = entry.getValue().getAsString(); + } else if (entry.getKey().equals("objects")) { + JsonArray objectArray = entry.getValue().getAsJsonArray(); + for (JsonElement object : objectArray) { + DataModelObject dmo = DataModelObject.parse(this, object); + objects.put(dmo.getName(), dmo); + } + } else { + // Allow new keys without complaining + } + } + } + + /** + * Parse the acceleration description from splunkd of this data model. + * + * This method writes the results into fields of this object. + * + * @param input a string containing JSON. + */ + private void parseAcceleration(String input) { + JsonElement rootElement = jsonParser.parse(input); + + for (Entry<String, JsonElement> entry : rootElement. getAsJsonObject().entrySet()) { + if (entry.getKey().equals("enabled")) { + // API is broken in 6.1. It returns 1 instead of true (but does return false). 
+ if (((JsonPrimitive)entry.getValue()).isBoolean()) { + accelerationEnabled = entry.getValue().getAsBoolean(); + } else if (((JsonPrimitive)entry.getValue()).isNumber()) { + accelerationEnabled = entry.getValue().getAsInt() != 0; + } else { + throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + + " for whether acceleration is enabled."); + } + } else if (entry.getKey().equals("earliest_time")) { + earliestAcceleratedTime = entry.getValue().getAsString(); + } else if (entry.getKey().equals("cron_schedule")) { + accelerationCronSchedule = entry.getValue().getAsString(); + } else if (entry.getKey().equals("manual_rebuilds")) { + if (((JsonPrimitive)entry.getValue()).isBoolean()) { + manualRebuilds = entry.getValue().getAsBoolean(); + } else if (((JsonPrimitive)entry.getValue()).isNumber()) { + manualRebuilds = entry.getValue().getAsInt() != 0; + } else { + throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + + " for whether manual_rebuilds is enabled."); + } + } else { + // Allow new keys without complaining + } + } + } + + /** + * Enable or disable global acceleration on this data model. + * + * @param enabled true enabled, false disables. + */ + public void setAcceleration(boolean enabled) { + this.accelerationEnabled = enabled; + toUpdate.put("enabled", enabled); + } + + /** + * Return the earliest time of the window over which the data model is accelerated. + * + * Times are represented relative to now, given by a minus sign, a number, and a + * suffix indicating the time unit (e.g., "-2mon", "-1day"). + * + * @return a string representing the earliest accelerated time. + */ + public String getEarliestAcceleratedTime() { + return earliestAcceleratedTime; + } + + /** + * Set the size of the window (from the specified earliest time to now) over + * which the data model should be accelerated. 
+ * + * Times are represented relative to now, given by a minus sign, a number, and a + * suffix indicating the time unit (e.g., "-2mon", "-1day"). + * + * @param earliestAcceleratedTime a string specifying a time. + */ + public void setEarliestAcceleratedTime(String earliestAcceleratedTime) { + this.earliestAcceleratedTime = earliestAcceleratedTime; + toUpdate.put("earliest_time", earliestAcceleratedTime); + } + + /** + * Return the cron schedule on which the cached data for acceleration should be + * updated. + * + * @return a string containing a crontab style schedule specification. + */ + public String getAccelerationCronSchedule() { + return accelerationCronSchedule; + } + + /** + * Set the cron schedule on which the cached data for the acceleration should + * be updated. + * + * @param accelerationCronSchedule a crontab style schedule to use. + */ + public void setAccelerationCronSchedule(String accelerationCronSchedule) { + this.accelerationCronSchedule = accelerationCronSchedule; + toUpdate.put("cron_schedule", accelerationCronSchedule); + } + + /** + * This setting prevents outdated summaries from being rebuilt by the + * 'summarize' command. + * + * @return whether manual rebuilds are enabled for this data model. + */ + public boolean isManualRebuilds() { + return this.manualRebuilds; + } + + /** + * Enable or disable manual rebuilds on this data model. + * + * @param enabled true enabled, false disables. + */ + public void setManualRebuilds(boolean enabled) { + this.manualRebuilds = enabled; + toUpdate.put("manual_rebuilds", enabled); + } + + @Override + public void update() { + // We have to do some munging on the acceleration fields to pass them as JSON + // to the server. 
+ Map<String, Object> accelerationMap = new HashMap<>(); + for (String key : new String[] {"enabled", "earliest_time", "cron_schedule", "manual_rebuilds"}) { + if (toUpdate.containsKey(key)) { + accelerationMap.put(key, toUpdate.get(key)); + toUpdate.remove(key); + } + } + + if (!accelerationMap.isEmpty()) { + toUpdate.put("acceleration", gson.toJson(accelerationMap)); + } + + // Now update like we would any other entity. + super.update(); + } +} diff --git a/splunk/src/main/java/com/splunk/DataModelCalculation.java b/splunk/src/main/java/com/splunk/DataModelCalculation.java index 499c0584..a8ba5afe 100644 --- a/splunk/src/main/java/com/splunk/DataModelCalculation.java +++ b/splunk/src/main/java/com/splunk/DataModelCalculation.java @@ -1,166 +1,165 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import java.util.*; -import java.util.Map.Entry; - -/** - * Abstract class specifying a calculation on a data model object. 
- */ -public abstract class DataModelCalculation { - private final String[] ownerLineage; - private final String calculationID; - private final Map<String, DataModelField> generatedFields; - private final String comment; - private final boolean editable; - - protected DataModelCalculation(String[] ownerLineage, String calculationID, - Map<String, DataModelField> generatedFields, String comment, boolean editable) { - this.ownerLineage = ownerLineage; - this.calculationID = calculationID; - this.generatedFields = generatedFields; - this.comment = comment; - this.editable = editable; - } - - /** - * @return the ID of this calculation. - */ - public String getCalculationID() { return this.calculationID; } - - /** - * @param fieldName Name of the field to fetch. - * @return whether this calculation generated a field of the given name. - */ - public boolean containsGeneratedField(String fieldName) { - return this.generatedFields.containsKey(fieldName); - } - - /** - * @return a collection of the fields this calculation generates. - */ - public Collection<DataModelField> getGeneratedFields() { - return Collections.unmodifiableCollection(this.generatedFields.values()); - } - - /** - * @param fieldName Name of the field to fetch. - * @return a DataModelField object. - */ - public DataModelField getGeneratedField(String fieldName) { return this.generatedFields.get(fieldName); } - - /** - * @return the comment on this calculation (if one is specified) or null. - */ - public String getComment() { return this.comment; } - - /** - * Returns the name of the object on which this calculation is defined. - * That need not be the one you accessed it from, as it may be inherited from - * another data model object. - * - * @return The name of the object on which this calculation is defined. 
- */ - public String getOwner() { return this.ownerLineage[this.ownerLineage.length-1]; } - - /** - * Return the lineage of the data model object on which this calculation is - * defined, starting with the most remote ancestor and ending with the data model object - * on which this calculation is defined. - * - * @return an array of the names of data model objects. - */ - public String[] getLineage() { return this.ownerLineage; } - - /** - * @return whether this calculation can be edited, or it is a system defined calculation. - */ - public boolean isEditable() { return this.editable; } - - static DataModelCalculation parse(JsonElement json) { - String type = null; - String calculationId = null; - List<LookupDataModelCalculation.LookupFieldMapping> lookupInputs = - new ArrayList<LookupDataModelCalculation.LookupFieldMapping>(); - String comment = null; - String expression = null; - String lookupName = null; - String lookupField = null; // We need lookupField and inputField to handle the case in Splunk 6.0 - String inputField = null; // where there is only one entry, and it's not in an array. 
- String[] owner = new String[0]; // Should always be set below - boolean editable = false; - Map<String, DataModelField> outputFields = new HashMap<String, DataModelField>(); - - String key; - for (Entry<String, JsonElement> entry : json.getAsJsonObject().entrySet()) { - key = entry.getKey(); - if (key.equals("calculationType")) { - type = entry.getValue().getAsString().toLowerCase(); - } else if (key.equals("calculationID")) { - calculationId = entry.getValue().getAsString(); - } else if (key.equals("outputFields")) { - for (JsonElement e : entry.getValue().getAsJsonArray()) { - DataModelField f = DataModelField.parse(e.getAsJsonObject()); - outputFields.put(f.getName(), f); - } - } else if (key.equals("lookupInputs")) { - for (JsonElement lookupInputJsonElement : entry.getValue().getAsJsonArray()) { - if (!(lookupInputJsonElement instanceof JsonObject)) { - throw new RuntimeException("Expected a JSON object for lookupInput entry."); - } - JsonObject lookupInputJson = (JsonObject)lookupInputJsonElement; - LookupDataModelCalculation.LookupFieldMapping mapping = new LookupDataModelCalculation.LookupFieldMapping(); - mapping.inputField = lookupInputJson.get("inputField").getAsString(); - mapping.lookupField = lookupInputJson.get("lookupField").getAsString(); - lookupInputs.add(mapping); - } - } else if (key.equals("inputField")) { - inputField = entry.getValue().getAsString(); - } else if (key.equals("comment")) { - comment = entry.getValue().getAsString(); - } else if (key.equals("expression")) { - expression = entry.getValue().getAsString(); - } else if (key.equals("lookupName")) { - lookupName = entry.getValue().getAsString(); - } else if (key.equals("lookupField")) { - lookupField = entry.getValue().getAsString(); - } else if (key.equals("owner")) { - owner = entry.getValue().getAsString().split("\\."); - } else if (key.equals("editable")) { - editable = entry.getValue().getAsBoolean(); - } - } - - DataModelCalculation c; - if (type.equals("lookup")) { - c = new 
LookupDataModelCalculation(owner, calculationId, outputFields, comment, editable, lookupName, lookupInputs); - } else if (type.equals("geoip")) { - c = new GeoIPDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField); - } else if (type.equals("eval")) { - c = new EvalDataModelCalculation(owner, calculationId, outputFields, comment, editable, expression); - } else if (type.equals("rex")) { - c = new RegexpDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField, expression); - } else { - throw new IllegalStateException("Unknown calculation type: " + type); - } - - return c; - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.splunk; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +import java.util.*; +import java.util.Map.Entry; + +/** + * Abstract class specifying a calculation on a data model object. 
+ */ +public abstract class DataModelCalculation { + private final String[] ownerLineage; + private final String calculationID; + private final Map<String, DataModelField> generatedFields; + private final String comment; + private final boolean editable; + + protected DataModelCalculation(String[] ownerLineage, String calculationID, + Map<String, DataModelField> generatedFields, String comment, boolean editable) { + this.ownerLineage = ownerLineage; + this.calculationID = calculationID; + this.generatedFields = generatedFields; + this.comment = comment; + this.editable = editable; + } + + /** + * @return the ID of this calculation. + */ + public String getCalculationID() { return this.calculationID; } + + /** + * @param fieldName Name of the field to fetch. + * @return whether this calculation generated a field of the given name. + */ + public boolean containsGeneratedField(String fieldName) { + return this.generatedFields.containsKey(fieldName); + } + + /** + * @return a collection of the fields this calculation generates. + */ + public Collection<DataModelField> getGeneratedFields() { + return Collections.unmodifiableCollection(this.generatedFields.values()); + } + + /** + * @param fieldName Name of the field to fetch. + * @return a DataModelField object. + */ + public DataModelField getGeneratedField(String fieldName) { return this.generatedFields.get(fieldName); } + + /** + * @return the comment on this calculation (if one is specified) or null. + */ + public String getComment() { return this.comment; } + + /** + * Returns the name of the object on which this calculation is defined. + * That need not be the one you accessed it from, as it may be inherited from + * another data model object. + * + * @return The name of the object on which this calculation is defined. 
+ */ + public String getOwner() { return this.ownerLineage[this.ownerLineage.length-1]; } + + /** + * Return the lineage of the data model object on which this calculation is + * defined, starting with the most remote ancestor and ending with the data model object + * on which this calculation is defined. + * + * @return an array of the names of data model objects. + */ + public String[] getLineage() { return this.ownerLineage; } + + /** + * @return whether this calculation can be edited, or it is a system defined calculation. + */ + public boolean isEditable() { return this.editable; } + + static DataModelCalculation parse(JsonElement json) { + String type = null; + String calculationId = null; + List<LookupDataModelCalculation.LookupFieldMapping> lookupInputs = + new ArrayList<>(); + String comment = null; + String expression = null; + String lookupName = null; + String lookupField = null; // We need lookupField and inputField to handle the case in Splunk 6.0 + String inputField = null; // where there is only one entry, and it's not in an array. 
+ String[] owner = new String[0]; // Should always be set below + boolean editable = false; + Map<String, DataModelField> outputFields = new HashMap<>(); + + String key; + for (Entry<String, JsonElement> entry : json.getAsJsonObject().entrySet()) { + key = entry.getKey(); + if (key.equals("calculationType")) { + type = entry.getValue().getAsString().toLowerCase(); + } else if (key.equals("calculationID")) { + calculationId = entry.getValue().getAsString(); + } else if (key.equals("outputFields")) { + for (JsonElement e : entry.getValue().getAsJsonArray()) { + DataModelField f = DataModelField.parse(e.getAsJsonObject()); + outputFields.put(f.getName(), f); + } + } else if (key.equals("lookupInputs")) { + for (JsonElement lookupInputJsonElement : entry.getValue().getAsJsonArray()) { + if (!(lookupInputJsonElement instanceof JsonObject lookupInputJson)) { + throw new RuntimeException("Expected a JSON object for lookupInput entry."); + } + LookupDataModelCalculation.LookupFieldMapping mapping = new LookupDataModelCalculation.LookupFieldMapping(); + mapping.inputField = lookupInputJson.get("inputField").getAsString(); + mapping.lookupField = lookupInputJson.get("lookupField").getAsString(); + lookupInputs.add(mapping); + } + } else if (key.equals("inputField")) { + inputField = entry.getValue().getAsString(); + } else if (key.equals("comment")) { + comment = entry.getValue().getAsString(); + } else if (key.equals("expression")) { + expression = entry.getValue().getAsString(); + } else if (key.equals("lookupName")) { + lookupName = entry.getValue().getAsString(); + } else if (key.equals("lookupField")) { + lookupField = entry.getValue().getAsString(); + } else if (key.equals("owner")) { + owner = entry.getValue().getAsString().split("\\."); + } else if (key.equals("editable")) { + editable = entry.getValue().getAsBoolean(); + } + } + + DataModelCalculation c; + if (type.equals("lookup")) { + c = new LookupDataModelCalculation(owner, calculationId, outputFields, comment, 
editable, lookupName, lookupInputs); + } else if (type.equals("geoip")) { + c = new GeoIPDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField); + } else if (type.equals("eval")) { + c = new EvalDataModelCalculation(owner, calculationId, outputFields, comment, editable, expression); + } else if (type.equals("rex")) { + c = new RegexpDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField, expression); + } else { + throw new IllegalStateException("Unknown calculation type: " + type); + } + + return c; + } +} diff --git a/splunk/src/main/java/com/splunk/DataModelObject.java b/splunk/src/main/java/com/splunk/DataModelObject.java index 75c84d88..f95d98ef 100644 --- a/splunk/src/main/java/com/splunk/DataModelObject.java +++ b/splunk/src/main/java/com/splunk/DataModelObject.java @@ -1,394 +1,394 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; - -import java.util.*; -import java.util.Map.Entry; - -/** - * DataModelObject represents one of the structured views in a data model. 
- */ -public class DataModelObject { - private DataModel model; - private String name; - private String[] lineage; - private String displayName; - private String parentName; - - private Map<String, DataModelField> autoextractedFields; - private Collection<DataModelConstraint> constraints; - private Map<String, DataModelCalculation> calculations; - - protected DataModelObject(DataModel model) { - this.model = model; - } - - /** - * Checks whether there is a field with the given name in this - * data model object. - * - * @param fieldName name of the field to check for. - * @return true if there is such a field; false otherwise. - */ - public boolean containsField(String fieldName) { - if (autoextractedFields.containsKey(fieldName)) { - return true; - } - for (DataModelCalculation c : calculations.values()) { - if (c.containsGeneratedField(fieldName)) { - return true; - } - } - return false; - } - - /** - * Local acceleration is tsidx acceleration of a data model object that is handled - * manually by a user. You create a job which generates an index, and then use that - * index in your pivots on the data model object. - * - * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. You - * would use it in another job by starting your search query with - * - * | tstats ... from sid={sid} | ... - * - * The tsidx index created by this job is deleted when the job is garbage collected - * by Splunk. - * - * It is the user's responsibility to manage this job, including cancelling it. - * - * @return a Job writing a tsidx index. - */ - public Job createLocalAccelerationJob() { - return createLocalAccelerationJob(null); - } - - /** - * Local acceleration is tsidx acceleration of a data model object that is handled - * manually by a user. You create a job which generates an index, and then use that - * index in your pivots on the data model object. - * - * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. 
You - * would use it in another job by starting your search query with - * - * | tstats ... from sid={sid} | ... - * - * The tsidx index created by this job is deleted when the job is garbage collected - * by Splunk. - * - * It is the user's responsibility to manage this job, including cancelling it. - * - * @param earliestTime A time modifier (e.g., "-2w") setting the earliest time to index. - * @return a Job writing a tsidx index. - */ - public Job createLocalAccelerationJob(String earliestTime) { - String query = "| datamodel " + this.model.getName() + " " + - this.getName() + " search | tscollect"; - JobArgs args = new JobArgs(); - if (earliestTime != null) { - args.setEarliestTime(earliestTime); - } - return this.model.getService().search(query, args); - } - - /** - * Return the calculations done by this data model object to produce fields. - * - * Each calculation has a unique ID assigned to it by splunkd, which is the key - * in the returned map. For most purposes you will probably only want the values. - * - * @return a map of calculation IDs to DataModelCalculation objects. - */ - public Map<String, DataModelCalculation> getCalculations() { - return Collections.unmodifiableMap(this.calculations); - } - - /** - * Fetch a calculation by its unique ID. - * - * @param calculationId a splunkd assigned unique ID for this calculation. - * @return a DataModelCalculation object. - */ - public DataModelCalculation getCalculation(String calculationId) { - return this.calculations.get(calculationId); - } - - /** - * @return a collection of the constraints limiting events that will appear in this data model object. - */ - public Collection<DataModelConstraint> getConstraints() { - return Collections.unmodifiableCollection(this.constraints); - } - - /** - * Fetch the data model on which this object is defined. - * - * @return A DataModel instance containing this object. 
- */ - public DataModel getDataModel() { - return this.model; - } - - /** - * @return the human readable name of this data model object. - */ - public String getDisplayName() { - return this.displayName; - } - - /** - * Fetch a single field of a given name from this data model object. - * - * @param fieldName Name of the field to fetch. - * @return A DataModelField object, or null if there is no field of the given name. - */ - public DataModelField getField(String fieldName) { - if (autoextractedFields.containsKey(fieldName)) { - return autoextractedFields.get(fieldName); - } - for (DataModelCalculation c : this.calculations.values()) { - if (c.containsGeneratedField(fieldName)) { - return c.getGeneratedField(fieldName); - } - } - return null; - } - - /** - * Get a collection of objects specifying all the fields that were automatically extracted - * from events (as opposed to generated by calculations in a data model). - * - * @return a collection of DataModelField objects. - */ - public Collection<DataModelField> getAutoExtractedFields() { - return Collections.unmodifiableCollection(autoextractedFields.values()); - } - - /** - * Return all the fields, whether input or created by calculations. - * @return a collection of DataModelField objects. - */ - public Collection<DataModelField> getFields() { - Collection<DataModelField> fields = new ArrayList<DataModelField>(); - fields.addAll(this.autoextractedFields.values()); - for (DataModelCalculation c : this.calculations.values()) { - fields.addAll(c.getGeneratedFields()); - } - return fields; - } - - public String getQuery() { - return "| datamodel " + this.getDataModel().getName() + " " + this.getName() + " search"; - } - - /** - * @return Splunk's identifier for this data model object. 
- */ - public String getName() { return this.name; } - - /** - * Data model objects can inherit from other data model objects - * in the same data model (or from a couple of global base objects - * such as BaseEvent and BaseTransaction). The lineage is a list of - * data model object names tracing this inheritance, starting with the - * most remote ancestor and ending with this object. - * - * @return An array of names, starting with this object's name, followed by - * the names up the hierarchy. - */ - public String[] getLineage() { return this.lineage; } - - /** - * Returns the name of the parent of this object. - * - * @return a String giving the name. - */ - public String getParentName() { - return this.parentName; - } - - /** - * @return the data model object this one inherits from if it is a user defined data model object - * in the same data model; otherwise returns null (for example if the data model object inherits from BaseEvent - * or BaseTransaction). - */ - public DataModelObject getParent() { - return this.getDataModel().getObject(this.parentName); - } - - /** - * Create a PivotSpecification on this data model object. - * - * @return a PivotSpecification instance. - */ - public PivotSpecification createPivotSpecification() { - return new PivotSpecification(this); - } - - /** - * Start a job that fetches all the events of this data model object. - * - * @return a Job object. - */ - public Job runQuery() { - return runQuery("", null); - } - - /** - * Start a job that fetches all the events of this data model object. - * - * @param args arguments specifying behavior of the job. - * @return a Job object. - */ - public Job runQuery(JobArgs args) { - return runQuery("", args); - } - - /** - * Start a job that applies querySuffix to all the events in this data model object. - * - * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch - * the contents of this data model object (e.g., "| head 3"). 
- * @return a Job object. - */ - public Job runQuery(String querySuffix) { - return runQuery(querySuffix, null); - } - - /** - * Start a job that applies querySuffix to all the events in this data model object. - * - * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch - * the contents of this data model object (e.g., "| head 3"). - * @param args arguments to control the job. - * @return a Job object. - */ - public Job runQuery(String querySuffix, JobArgs args) { - return getDataModel().getService().search(getQuery() + querySuffix, args); - } - - /** - * Produce a data model object from a JSON dictionary specifying it plus a data model that contains it. - - * @param dataModel a DataModel instance that contains this data model object. - * @param object a JsonElement (as produced by Gson) specifying this data model object (usually one of - * the entries in the array of objects in the JSON description of the data model). - * @return a DataModelObject instance. - */ - static DataModelObject parse(DataModel dataModel, JsonElement object) { - String name = null; - String displayName = null; - String comment = null; - String[] lineage = new String[0]; - String parentName = null; - Map<String, DataModelField> fields = new HashMap<String, DataModelField>(); - Collection<String> children = new ArrayList<String>(); - Collection<DataModelConstraint> constraints = new ArrayList<DataModelConstraint>(); - Map<String, DataModelCalculation> calculations = new HashMap<String, DataModelCalculation>(); - - // Fields specific to objects inheriting directly from BaseSearch. 
- String baseSearch = null; - // Fields specific to objects inheriting directly from BaseTransaction - String transactionMaxPause = null; - String transactionMaxTimeSpan = null; - Collection<String> groupByFields = new ArrayList<String>(); - Collection<String> objectsToGroup = new ArrayList<String>(); - - for (Entry<String, JsonElement> entry : object.getAsJsonObject().entrySet()) { - if (entry.getKey().equals("objectName")) { - name = entry.getValue().getAsString(); - } else if (entry.getKey().equals("displayName")) { - displayName = entry.getValue().getAsString(); - } else if (entry.getKey().equals("lineage")) { - lineage = entry.getValue().getAsString().split("\\."); - } else if (entry.getKey().equals("parentName")) { - parentName = entry.getValue().getAsString(); - } else if (entry.getKey().equals("fields")) { - JsonArray fieldsJson = entry.getValue().getAsJsonArray(); - fields.clear(); - - for (JsonElement fieldJson : fieldsJson) { - DataModelField field = DataModelField.parse(fieldJson); - fields.put(field.getName(), field); - } - } else if (entry.getKey().equals("constraints")) { - JsonArray constraintsJson = entry.getValue().getAsJsonArray(); - - for (JsonElement constraintJson : constraintsJson) { - DataModelConstraint constraint = DataModelConstraint.parse(constraintJson); - constraints.add(constraint); - } - } else if (entry.getKey().equals("calculations")) { - calculations.clear(); - for (JsonElement cjson : entry.getValue().getAsJsonArray()) { - DataModelCalculation c = DataModelCalculation.parse(cjson); - String cid = c.getCalculationID(); - calculations.put(cid, c); - } - } else if (entry.getKey().equals("baseSearch")) { - baseSearch = entry.getValue().getAsString(); - } else if (entry.getKey().equals("transactionMaxPause")) { - transactionMaxPause = entry.getValue().getAsString(); - } else if (entry.getKey().equals("transactionMaxTimeSpan")) { - transactionMaxTimeSpan = entry.getValue().getAsString(); - } else if 
(entry.getKey().equals("groupByFields")) { - for (JsonElement e : entry.getValue().getAsJsonArray()) { - groupByFields.add(e.getAsString()); - } - } else if (entry.getKey().equals("objectsToGroup")) { - for (JsonElement e : entry.getValue().getAsJsonArray()) { - objectsToGroup.add(e.getAsString()); - } - } - } - - DataModelObject dmo; - // Create the right subclass of DataModelObject. - if (baseSearch != null) { - dmo = new DataModelSearch(dataModel); - } else if (transactionMaxPause != null) { - dmo = new DataModelTransaction(dataModel); - } else { - dmo = new DataModelObject(dataModel); - } - - // Set the fields common to all data model objects - dmo.name = name; - dmo.displayName = displayName; - dmo.lineage = lineage; - dmo.parentName = parentName; - dmo.autoextractedFields = fields; - dmo.constraints = constraints; - dmo.calculations = calculations; - - // Set the fields of particular types - if (baseSearch != null) { - ((DataModelSearch)dmo).baseSearch = baseSearch; - } else if (transactionMaxPause != null) { - ((DataModelTransaction)dmo).groupByFields = groupByFields; - ((DataModelTransaction)dmo).objectsToGroup = objectsToGroup; - ((DataModelTransaction)dmo).maxPause = transactionMaxPause; - ((DataModelTransaction)dmo).maxSpan = transactionMaxTimeSpan; - } else { - // Has no additional fields - } - - return dmo; - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +package com.splunk; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; + +import java.util.*; +import java.util.Map.Entry; + +/** + * DataModelObject represents one of the structured views in a data model. + */ +public class DataModelObject { + private DataModel model; + private String name; + private String[] lineage; + private String displayName; + private String parentName; + + private Map<String, DataModelField> autoextractedFields; + private Collection<DataModelConstraint> constraints; + private Map<String, DataModelCalculation> calculations; + + protected DataModelObject(DataModel model) { + this.model = model; + } + + /** + * Checks whether there is a field with the given name in this + * data model object. + * + * @param fieldName name of the field to check for. + * @return true if there is such a field; false otherwise. + */ + public boolean containsField(String fieldName) { + if (autoextractedFields.containsKey(fieldName)) { + return true; + } + for (DataModelCalculation c : calculations.values()) { + if (c.containsGeneratedField(fieldName)) { + return true; + } + } + return false; + } + + /** + * Local acceleration is tsidx acceleration of a data model object that is handled + * manually by a user. You create a job which generates an index, and then use that + * index in your pivots on the data model object. + * + * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. You + * would use it in another job by starting your search query with + * + * | tstats ... from sid={sid} | ... + * + * The tsidx index created by this job is deleted when the job is garbage collected + * by Splunk. + * + * It is the user's responsibility to manage this job, including cancelling it. + * + * @return a Job writing a tsidx index. 
+ */ + public Job createLocalAccelerationJob() { + return createLocalAccelerationJob(null); + } + + /** + * Local acceleration is tsidx acceleration of a data model object that is handled + * manually by a user. You create a job which generates an index, and then use that + * index in your pivots on the data model object. + * + * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. You + * would use it in another job by starting your search query with + * + * | tstats ... from sid={sid} | ... + * + * The tsidx index created by this job is deleted when the job is garbage collected + * by Splunk. + * + * It is the user's responsibility to manage this job, including cancelling it. + * + * @param earliestTime A time modifier (e.g., "-2w") setting the earliest time to index. + * @return a Job writing a tsidx index. + */ + public Job createLocalAccelerationJob(String earliestTime) { + String query = "| datamodel " + this.model.getName() + " " + + this.getName() + " search | tscollect"; + JobArgs args = new JobArgs(); + if (earliestTime != null) { + args.setEarliestTime(earliestTime); + } + return this.model.getService().search(query, args); + } + + /** + * Return the calculations done by this data model object to produce fields. + * + * Each calculation has a unique ID assigned to it by splunkd, which is the key + * in the returned map. For most purposes you will probably only want the values. + * + * @return a map of calculation IDs to DataModelCalculation objects. + */ + public Map<String, DataModelCalculation> getCalculations() { + return Collections.unmodifiableMap(this.calculations); + } + + /** + * Fetch a calculation by its unique ID. + * + * @param calculationId a splunkd assigned unique ID for this calculation. + * @return a DataModelCalculation object. 
+ */ + public DataModelCalculation getCalculation(String calculationId) { + return this.calculations.get(calculationId); + } + + /** + * @return a collection of the constraints limiting events that will appear in this data model object. + */ + public Collection<DataModelConstraint> getConstraints() { + return Collections.unmodifiableCollection(this.constraints); + } + + /** + * Fetch the data model on which this object is defined. + * + * @return A DataModel instance containing this object. + */ + public DataModel getDataModel() { + return this.model; + } + + /** + * @return the human readable name of this data model object. + */ + public String getDisplayName() { + return this.displayName; + } + + /** + * Fetch a single field of a given name from this data model object. + * + * @param fieldName Name of the field to fetch. + * @return A DataModelField object, or null if there is no field of the given name. + */ + public DataModelField getField(String fieldName) { + if (autoextractedFields.containsKey(fieldName)) { + return autoextractedFields.get(fieldName); + } + for (DataModelCalculation c : this.calculations.values()) { + if (c.containsGeneratedField(fieldName)) { + return c.getGeneratedField(fieldName); + } + } + return null; + } + + /** + * Get a collection of objects specifying all the fields that were automatically extracted + * from events (as opposed to generated by calculations in a data model). + * + * @return a collection of DataModelField objects. + */ + public Collection<DataModelField> getAutoExtractedFields() { + return Collections.unmodifiableCollection(autoextractedFields.values()); + } + + /** + * Return all the fields, whether input or created by calculations. + * @return a collection of DataModelField objects. 
+ */ + public Collection<DataModelField> getFields() { + Collection<DataModelField> fields = new ArrayList<>(); + fields.addAll(this.autoextractedFields.values()); + for (DataModelCalculation c : this.calculations.values()) { + fields.addAll(c.getGeneratedFields()); + } + return fields; + } + + public String getQuery() { + return "| datamodel " + this.getDataModel().getName() + " " + this.getName() + " search"; + } + + /** + * @return Splunk's identifier for this data model object. + */ + public String getName() { return this.name; } + + /** + * Data model objects can inherit from other data model objects + * in the same data model (or from a couple of global base objects + * such as BaseEvent and BaseTransaction). The lineage is a list of + * data model object names tracing this inheritance, starting with the + * most remote ancestor and ending with this object. + * + * @return An array of names, starting with this object's name, followed by + * the names up the hierarchy. + */ + public String[] getLineage() { return this.lineage; } + + /** + * Returns the name of the parent of this object. + * + * @return a String giving the name. + */ + public String getParentName() { + return this.parentName; + } + + /** + * @return the data model object this one inherits from if it is a user defined data model object + * in the same data model; otherwise returns null (for example if the data model object inherits from BaseEvent + * or BaseTransaction). + */ + public DataModelObject getParent() { + return this.getDataModel().getObject(this.parentName); + } + + /** + * Create a PivotSpecification on this data model object. + * + * @return a PivotSpecification instance. + */ + public PivotSpecification createPivotSpecification() { + return new PivotSpecification(this); + } + + /** + * Start a job that fetches all the events of this data model object. + * + * @return a Job object. 
+ */ + public Job runQuery() { + return runQuery("", null); + } + + /** + * Start a job that fetches all the events of this data model object. + * + * @param args arguments specifying behavior of the job. + * @return a Job object. + */ + public Job runQuery(JobArgs args) { + return runQuery("", args); + } + + /** + * Start a job that applies querySuffix to all the events in this data model object. + * + * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch + * the contents of this data model object (e.g., "| head 3"). + * @return a Job object. + */ + public Job runQuery(String querySuffix) { + return runQuery(querySuffix, null); + } + + /** + * Start a job that applies querySuffix to all the events in this data model object. + * + * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch + * the contents of this data model object (e.g., "| head 3"). + * @param args arguments to control the job. + * @return a Job object. + */ + public Job runQuery(String querySuffix, JobArgs args) { + return getDataModel().getService().search(getQuery() + querySuffix, args); + } + + /** + * Produce a data model object from a JSON dictionary specifying it plus a data model that contains it. + + * @param dataModel a DataModel instance that contains this data model object. + * @param object a JsonElement (as produced by Gson) specifying this data model object (usually one of + * the entries in the array of objects in the JSON description of the data model). + * @return a DataModelObject instance. 
+ */ + static DataModelObject parse(DataModel dataModel, JsonElement object) { + String name = null; + String displayName = null; + String comment = null; + String[] lineage = new String[0]; + String parentName = null; + Map<String, DataModelField> fields = new HashMap<>(); + Collection<String> children = new ArrayList<>(); + Collection<DataModelConstraint> constraints = new ArrayList<>(); + Map<String, DataModelCalculation> calculations = new HashMap<>(); + + // Fields specific to objects inheriting directly from BaseSearch. + String baseSearch = null; + // Fields specific to objects inheriting directly from BaseTransaction + String transactionMaxPause = null; + String transactionMaxTimeSpan = null; + Collection<String> groupByFields = new ArrayList<>(); + Collection<String> objectsToGroup = new ArrayList<>(); + + for (Entry<String, JsonElement> entry : object.getAsJsonObject().entrySet()) { + if (entry.getKey().equals("objectName")) { + name = entry.getValue().getAsString(); + } else if (entry.getKey().equals("displayName")) { + displayName = entry.getValue().getAsString(); + } else if (entry.getKey().equals("lineage")) { + lineage = entry.getValue().getAsString().split("\\."); + } else if (entry.getKey().equals("parentName")) { + parentName = entry.getValue().getAsString(); + } else if (entry.getKey().equals("fields")) { + JsonArray fieldsJson = entry.getValue().getAsJsonArray(); + fields.clear(); + + for (JsonElement fieldJson : fieldsJson) { + DataModelField field = DataModelField.parse(fieldJson); + fields.put(field.getName(), field); + } + } else if (entry.getKey().equals("constraints")) { + JsonArray constraintsJson = entry.getValue().getAsJsonArray(); + + for (JsonElement constraintJson : constraintsJson) { + DataModelConstraint constraint = DataModelConstraint.parse(constraintJson); + constraints.add(constraint); + } + } else if (entry.getKey().equals("calculations")) { + calculations.clear(); + for (JsonElement cjson : entry.getValue().getAsJsonArray()) 
{ + DataModelCalculation c = DataModelCalculation.parse(cjson); + String cid = c.getCalculationID(); + calculations.put(cid, c); + } + } else if (entry.getKey().equals("baseSearch")) { + baseSearch = entry.getValue().getAsString(); + } else if (entry.getKey().equals("transactionMaxPause")) { + transactionMaxPause = entry.getValue().getAsString(); + } else if (entry.getKey().equals("transactionMaxTimeSpan")) { + transactionMaxTimeSpan = entry.getValue().getAsString(); + } else if (entry.getKey().equals("groupByFields")) { + for (JsonElement e : entry.getValue().getAsJsonArray()) { + groupByFields.add(e.getAsString()); + } + } else if (entry.getKey().equals("objectsToGroup")) { + for (JsonElement e : entry.getValue().getAsJsonArray()) { + objectsToGroup.add(e.getAsString()); + } + } + } + + DataModelObject dmo; + // Create the right subclass of DataModelObject. + if (baseSearch != null) { + dmo = new DataModelSearch(dataModel); + } else if (transactionMaxPause != null) { + dmo = new DataModelTransaction(dataModel); + } else { + dmo = new DataModelObject(dataModel); + } + + // Set the fields common to all data model objects + dmo.name = name; + dmo.displayName = displayName; + dmo.lineage = lineage; + dmo.parentName = parentName; + dmo.autoextractedFields = fields; + dmo.constraints = constraints; + dmo.calculations = calculations; + + // Set the fields of particular types + if (baseSearch != null) { + ((DataModelSearch)dmo).baseSearch = baseSearch; + } else if (transactionMaxPause != null) { + ((DataModelTransaction)dmo).groupByFields = groupByFields; + ((DataModelTransaction)dmo).objectsToGroup = objectsToGroup; + ((DataModelTransaction)dmo).maxPause = transactionMaxPause; + ((DataModelTransaction)dmo).maxSpan = transactionMaxTimeSpan; + } else { + // Has no additional fields + } + + return dmo; + } +} diff --git a/splunk/src/main/java/com/splunk/Entity.java b/splunk/src/main/java/com/splunk/Entity.java index a4856e2a..12cbbe6f 100644 --- 
a/splunk/src/main/java/com/splunk/Entity.java +++ b/splunk/src/main/java/com/splunk/Entity.java @@ -1,493 +1,493 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.*; - -/** - * The {@code Entity} class represents a Splunk entity. - */ -public class Entity extends Resource implements Map<String, Object> { - protected Record content; - protected HashMap<String, Object> toUpdate = new LinkedHashMap<String, Object>(); - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The entity's endpoint. - */ - public Entity(Service service, String path) { - super(service, path); - } - - /** - * Returns the path that corresponds to the requested action. - * - * @param action The requested action. - * @return The return path. 
- */ - protected String actionPath(String action) { - if (action.equals("disable")) - return path + "/disable"; - if (action.equals("edit")) - return path; - if (action.equals("enable")) - return path + "/enable"; - if (action.equals("remove")) - return path; - if (action.equals("acl")) - return path + "/acl"; - throw new IllegalArgumentException("Invalid action: " + action); - } - - /** {@inheritDoc} */ - public void clear() { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public boolean containsKey(Object key) { - return getContent().containsKey(key); - } - - /** {@inheritDoc} */ - public boolean containsValue(Object value) { - return getContent().containsValue(value); - } - - /** - * Disables the entity that is named by this endpoint. This method is - * available on almost every endpoint. - */ - public void disable() { - service.post(actionPath("disable")); - invalidate(); - } - - /** - * Enables the entity that is named by this endpoint. This method is - * available on almost every endpoint. - */ - public void enable() { - service.post(actionPath("enable")); - invalidate(); - } - - /** {@inheritDoc} */ - public Set<Map.Entry<String, Object>> entrySet() { - return getContent().entrySet(); - } - - /** {@inheritDoc} */ - public Object get(Object key) { - if (toUpdate.containsKey(key)) return toUpdate.get(key); - return getContent().get(key); - } - - /** - * Returns the Boolean value associated with the specified key. Values - * can be converted from: 0, 1, true, false. - * - * @param key The key to look up. - * @return The Boolean value associated with the specified key. - */ - boolean getBoolean(String key) { - if (toUpdate.containsKey(key)) - return Value.toBoolean(toUpdate.get(key).toString()); - return getContent().getBoolean(key); - } - - /** - * Returns the Boolean value associated with the specified key, or the - * default value if the key does not exist. Boolean values can be converted - * from: 0, 1, true, false. 
- * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The Boolean value associated with the specified key. - */ - boolean getBoolean(String key, boolean defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toBoolean(toUpdate.get(key).toString()); - return getContent().getBoolean(key, defaultValue); - } - - /** - * Returns the long value associated with the specified key. Long values - * can be converted from: number, numberMB, numberGB. - * - * @param key The key to look up. - * @return The long value associated with the specified key. - */ - long getByteCount(String key) { - if (toUpdate.containsKey(key)) - return Value.toByteCount(toUpdate.get(key).toString()); - return getContent().getByteCount(key); - } - - /** - * Returns the long value associated with the specified key, or the default - * value if the key does not exist. Long values can be converted from: - * number, numberMB, numberGB. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The long value associated with the specified key. - */ - long getByteCount(String key, long defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toByteCount(toUpdate.get(key).toString()); - return getContent().getByteCount(key, defaultValue); - } - - protected Record getContent() { - return validate().content; - } - - /** - * Returns a date value associated with the specified key. Date values can - * be converted from standard UTC time formats. - * - * @param key The key to look up. - * @return The date value associated with the specified key. - */ - Date getDate(String key) { - if (toUpdate.containsKey(key)) - return Value.toDate(toUpdate.get(key).toString()); - if (getContent().containsKey(key)) { - return getContent().getDate(key); - } else { - return null; - } - } - - /** - * Returns a date value associated with the specified key, or the default - * value if the key does not exist. 
Date values can be converted from - * standard UTC time formats. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The date value associated with the specified key. - */ - Date getDate(String key, Date defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toDate(toUpdate.get(key).toString()); - return getContent().getDate(key, defaultValue); - } - - /** - * Returns the floating point value associated with the specified key. - * - * @param key The key to look up. - * @return The floating point value associated with the specified key. - */ - float getFloat(String key) { - if (toUpdate.containsKey(key)) - return Value.toFloat(toUpdate.get(key).toString()); - return getContent().getFloat(key); - } - - /** - * Returns the integer point value associated with the specified key. - * - * @param key The key to look up. - * @return The integer point value associated with the specified key. - */ - int getInteger(String key) { - if (toUpdate.containsKey(key)) - return Value.toInteger(toUpdate.get(key).toString()); - return getContent().getInteger(key); - } - - /** - * Returns the integer value associated with the specified key. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The integer value associated with the specified key. - */ - int getInteger(String key, int defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toInteger(toUpdate.get(key).toString()); - return getContent().getInteger(key, defaultValue); - } - - /** - * Returns the long value associated with the specified key. - * - * @param key The key to look up. - * @return The long value associated with the specified key. - */ - long getLong(String key) { - if (toUpdate.containsKey(key)) - return Value.toLong(toUpdate.get(key).toString()); - return getContent().getLong(key); - } - - /** - * Returns the long value associated with the specified key. - * - * @param key The key to look up. 
- * @param defaultValue The default value. - * @return The long value associated with the specified key. - */ - long getLong(String key, int defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toLong(toUpdate.get(key).toString()); - return getContent().getLong(key, defaultValue); - } - - /** - * Returns the metadata (eai:acl) of this entity. This data includes - * permissions for accessing the resource, and values that indicate - * which resource fields are wildcards, required, and optional. - * - * @return The metadata of this entity, or {@code null} if none exist. - */ - public EntityMetadata getMetadata() { - // CONSIDER: For entities that don't have an eai:acl field, which is - // uncommon but does happen at least in the case of a DeploymentClient - // that is not enabled, we return null. A slightly friendlier option - // would be to return a metadata instance that defaults all values? - if (!containsKey("eai:acl")) return null; - return new EntityMetadata(this); - } - - /** - * Returns the string value associated with the specified key. - * - * @param key The key to look up. - * @return The string value associated with the specified key. - */ - String getString(String key) { - if (toUpdate.containsKey(key)) - return toUpdate.get(key).toString(); - return getContent().getString(key); - } - - /** - * Returns the string value associated with the specified key, or the - * default value if the key does not exist. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The string value associated with the specified key. - */ - String getString(String key, String defaultValue) { - if (toUpdate.containsKey(key)) - return toUpdate.get(key).toString(); - return getContent().getString(key, defaultValue); - } - - /** - * Returns the string array value associated with the specified key. - * - * @param key The key to look up. - * @return The string array value associated with the specified key. 
- */ - String[] getStringArray(String key) { - if (toUpdate.containsKey(key)) { - return ((String)toUpdate.get(key)).split("\\|"); - } - return getContent().getStringArray(key); - } - - /** - * Returns the string array value associated with the specified key, or the - * default value if the key does not exist. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The string array value associated with the specified key. - */ - String[] getStringArray(String key, String[] defaultValue) { - if (toUpdate.containsKey(key)) - return getStringArray(key); - return getContent().getStringArray(key, defaultValue); - } - - /** {@inheritDoc} */ - public boolean isEmpty() { - return getContent().isEmpty(); - } - - /** - * Indicates whether this entity is disabled. This method is - * available on almost every endpoint. - * - * @return {@code true} if this entity is disabled, {@code false} if - * enabled. - */ - public boolean isDisabled() { - return getBoolean("disabled", false); - } - - /** - * Returns whether this entity's name can be changed via {@link #update}. - * - * Most entity names cannot be changed in this way. - * @return false. - */ - protected boolean isNameChangeAllowed() { - return false; - } - - /** {@inheritDoc} */ - public Set<String> keySet() { - return getContent().keySet(); - } - - @Override - Entity load(AtomObject value) { - super.load(value); - AtomEntry entry = (AtomEntry)value; - if (entry == null) { - content = new Record(); - } - else { - content = entry.content; - } - return this; - } - - /** {@inheritDoc} */ - public Object put(String key, Object value) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public void putAll(Map<? extends String, ? extends Object> map) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - @Override public Entity refresh() { - // Update any attribute values set by a setter method that has not - // yet been written to the object. 
- ResponseMessage response = service.get(path); - assert(response.getStatus() == 200); - AtomFeed feed; - try { - feed = AtomFeed.parseStream(response.getContent()); - } catch (Exception e) { - throw new RuntimeException(e); - } - int count = feed.entries.size(); - if (count > 1) { - throw new IllegalStateException("Expected 0 or 1 Atom entries; found " + feed.entries.size()); - } - AtomEntry entry = count == 0 ? null : feed.entries.get(0); - load(entry); - return this; - } - - /** {@inheritDoc} */ - public Object remove(Object key) { - throw new UnsupportedOperationException(); - } - - /** - * Sets the local cache update value. Writing is deferred until - * {@code update} has been called. - * - * @param key The key to set. - * @param value The default value. - */ - void setCacheValue(String key, Object value) { - toUpdate.put(key, value); - } - - /** {@inheritDoc} */ - public int size() { - return getContent().size(); - } - - /** - * Updates the entity with the values you previously set using the setter - * methods, and any additional specified arguments. The specified arguments - * take precedent over the values that were set using the setter methods. - * - * @param args The arguments to update. - */ - public void update(Map<String, Object> args) { - if (!toUpdate.isEmpty() || !args.isEmpty()) { - // Merge cached setters and live args together before updating. - Map<String, Object> mergedArgs = - new LinkedHashMap<String, Object>(); - mergedArgs.putAll(toUpdate); - mergedArgs.putAll(args); - - if (mergedArgs.containsKey("name") && !isNameChangeAllowed()) { - throw new IllegalStateException("Cannot set 'name' on an existing entity."); - } - - service.post(actionPath("edit"), mergedArgs); - toUpdate.clear(); - invalidate(); - } - } - - /** - * Updates the entity with the accumulated arguments, established by the - * individual setter methods for each specific entity class. 
- */ - @SuppressWarnings("unchecked") - public void update() { - update(Collections.EMPTY_MAP); - } - - - /** - * Update the access control list (ACL) properties for this entity, - * - * @param args: Properties to update for this entity. - * Required Properties in 'args' - * - `owner`: The Splunk username, such as "admin". A value of "nobody" means no specific user. - * - `sharing`: A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system". - */ - public void aclUpdate(Map<String, Object> args){ - if(!args.containsKey("sharing")){ - throw new IllegalArgumentException("Required argument 'sharing' is missing."); - } - if(!args.containsKey("owner")){ - throw new IllegalArgumentException("Required argument 'owner' is missing."); - } - service.post(actionPath("acl"), args); - invalidate(); - } - - /** - * Removes this entity from its corresponding collection. - */ - public void remove() { - service.delete(actionPath("remove")); - } - - /** {@inheritDoc} */ - @Override public Entity validate() { - super.validate(); - return this; - } - - /** {@inheritDoc} */ - public Collection<Object> values() { - return getContent().values(); - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.*; + +/** + * The {@code Entity} class represents a Splunk entity. 
+ */ +public class Entity extends Resource implements Map<String, Object> { + protected Record content; + protected HashMap<String, Object> toUpdate = new LinkedHashMap<>(); + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The entity's endpoint. + */ + public Entity(Service service, String path) { + super(service, path); + } + + /** + * Returns the path that corresponds to the requested action. + * + * @param action The requested action. + * @return The return path. + */ + protected String actionPath(String action) { + if (action.equals("disable")) + return path + "/disable"; + if (action.equals("edit")) + return path; + if (action.equals("enable")) + return path + "/enable"; + if (action.equals("remove")) + return path; + if (action.equals("acl")) + return path + "/acl"; + throw new IllegalArgumentException("Invalid action: " + action); + } + + /** {@inheritDoc} */ + public void clear() { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public boolean containsKey(Object key) { + return getContent().containsKey(key); + } + + /** {@inheritDoc} */ + public boolean containsValue(Object value) { + return getContent().containsValue(value); + } + + /** + * Disables the entity that is named by this endpoint. This method is + * available on almost every endpoint. + */ + public void disable() { + service.post(actionPath("disable")); + invalidate(); + } + + /** + * Enables the entity that is named by this endpoint. This method is + * available on almost every endpoint. 
+ */ + public void enable() { + service.post(actionPath("enable")); + invalidate(); + } + + /** {@inheritDoc} */ + public Set<Map.Entry<String, Object>> entrySet() { + return getContent().entrySet(); + } + + /** {@inheritDoc} */ + public Object get(Object key) { + if (toUpdate.containsKey(key)) return toUpdate.get(key); + return getContent().get(key); + } + + /** + * Returns the Boolean value associated with the specified key. Values + * can be converted from: 0, 1, true, false. + * + * @param key The key to look up. + * @return The Boolean value associated with the specified key. + */ + boolean getBoolean(String key) { + if (toUpdate.containsKey(key)) + return Value.toBoolean(toUpdate.get(key).toString()); + return getContent().getBoolean(key); + } + + /** + * Returns the Boolean value associated with the specified key, or the + * default value if the key does not exist. Boolean values can be converted + * from: 0, 1, true, false. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The Boolean value associated with the specified key. + */ + boolean getBoolean(String key, boolean defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toBoolean(toUpdate.get(key).toString()); + return getContent().getBoolean(key, defaultValue); + } + + /** + * Returns the long value associated with the specified key. Long values + * can be converted from: number, numberMB, numberGB. + * + * @param key The key to look up. + * @return The long value associated with the specified key. + */ + long getByteCount(String key) { + if (toUpdate.containsKey(key)) + return Value.toByteCount(toUpdate.get(key).toString()); + return getContent().getByteCount(key); + } + + /** + * Returns the long value associated with the specified key, or the default + * value if the key does not exist. Long values can be converted from: + * number, numberMB, numberGB. + * + * @param key The key to look up. + * @param defaultValue The default value. 
+ * @return The long value associated with the specified key. + */ + long getByteCount(String key, long defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toByteCount(toUpdate.get(key).toString()); + return getContent().getByteCount(key, defaultValue); + } + + protected Record getContent() { + return validate().content; + } + + /** + * Returns a date value associated with the specified key. Date values can + * be converted from standard UTC time formats. + * + * @param key The key to look up. + * @return The date value associated with the specified key. + */ + Date getDate(String key) { + if (toUpdate.containsKey(key)) + return Value.toDate(toUpdate.get(key).toString()); + if (getContent().containsKey(key)) { + return getContent().getDate(key); + } else { + return null; + } + } + + /** + * Returns a date value associated with the specified key, or the default + * value if the key does not exist. Date values can be converted from + * standard UTC time formats. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The date value associated with the specified key. + */ + Date getDate(String key, Date defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toDate(toUpdate.get(key).toString()); + return getContent().getDate(key, defaultValue); + } + + /** + * Returns the floating point value associated with the specified key. + * + * @param key The key to look up. + * @return The floating point value associated with the specified key. + */ + float getFloat(String key) { + if (toUpdate.containsKey(key)) + return Value.toFloat(toUpdate.get(key).toString()); + return getContent().getFloat(key); + } + + /** + * Returns the integer point value associated with the specified key. + * + * @param key The key to look up. + * @return The integer point value associated with the specified key. 
+ */ + int getInteger(String key) { + if (toUpdate.containsKey(key)) + return Value.toInteger(toUpdate.get(key).toString()); + return getContent().getInteger(key); + } + + /** + * Returns the integer value associated with the specified key. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The integer value associated with the specified key. + */ + int getInteger(String key, int defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toInteger(toUpdate.get(key).toString()); + return getContent().getInteger(key, defaultValue); + } + + /** + * Returns the long value associated with the specified key. + * + * @param key The key to look up. + * @return The long value associated with the specified key. + */ + long getLong(String key) { + if (toUpdate.containsKey(key)) + return Value.toLong(toUpdate.get(key).toString()); + return getContent().getLong(key); + } + + /** + * Returns the long value associated with the specified key. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The long value associated with the specified key. + */ + long getLong(String key, int defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toLong(toUpdate.get(key).toString()); + return getContent().getLong(key, defaultValue); + } + + /** + * Returns the metadata (eai:acl) of this entity. This data includes + * permissions for accessing the resource, and values that indicate + * which resource fields are wildcards, required, and optional. + * + * @return The metadata of this entity, or {@code null} if none exist. + */ + public EntityMetadata getMetadata() { + // CONSIDER: For entities that don't have an eai:acl field, which is + // uncommon but does happen at least in the case of a DeploymentClient + // that is not enabled, we return null. A slightly friendlier option + // would be to return a metadata instance that defaults all values? 
+ if (!containsKey("eai:acl")) return null; + return new EntityMetadata(this); + } + + /** + * Returns the string value associated with the specified key. + * + * @param key The key to look up. + * @return The string value associated with the specified key. + */ + String getString(String key) { + if (toUpdate.containsKey(key)) + return toUpdate.get(key).toString(); + return getContent().getString(key); + } + + /** + * Returns the string value associated with the specified key, or the + * default value if the key does not exist. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The string value associated with the specified key. + */ + String getString(String key, String defaultValue) { + if (toUpdate.containsKey(key)) + return toUpdate.get(key).toString(); + return getContent().getString(key, defaultValue); + } + + /** + * Returns the string array value associated with the specified key. + * + * @param key The key to look up. + * @return The string array value associated with the specified key. + */ + String[] getStringArray(String key) { + if (toUpdate.containsKey(key)) { + return ((String)toUpdate.get(key)).split("\\|"); + } + return getContent().getStringArray(key); + } + + /** + * Returns the string array value associated with the specified key, or the + * default value if the key does not exist. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The string array value associated with the specified key. + */ + String[] getStringArray(String key, String[] defaultValue) { + if (toUpdate.containsKey(key)) + return getStringArray(key); + return getContent().getStringArray(key, defaultValue); + } + + /** {@inheritDoc} */ + public boolean isEmpty() { + return getContent().isEmpty(); + } + + /** + * Indicates whether this entity is disabled. This method is + * available on almost every endpoint. + * + * @return {@code true} if this entity is disabled, {@code false} if + * enabled. 
+ */ + public boolean isDisabled() { + return getBoolean("disabled", false); + } + + /** + * Returns whether this entity's name can be changed via {@link #update}. + * + * Most entity names cannot be changed in this way. + * @return false. + */ + protected boolean isNameChangeAllowed() { + return false; + } + + /** {@inheritDoc} */ + public Set<String> keySet() { + return getContent().keySet(); + } + + @Override + Entity load(AtomObject value) { + super.load(value); + AtomEntry entry = (AtomEntry)value; + if (entry == null) { + content = new Record(); + } + else { + content = entry.content; + } + return this; + } + + /** {@inheritDoc} */ + public Object put(String key, Object value) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public void putAll(Map<? extends String, ? extends Object> map) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + @Override public Entity refresh() { + // Update any attribute values set by a setter method that has not + // yet been written to the object. + ResponseMessage response = service.get(path); + assert(response.getStatus() == 200); + AtomFeed feed; + try { + feed = AtomFeed.parseStream(response.getContent()); + } catch (Exception e) { + throw new RuntimeException(e); + } + int count = feed.entries.size(); + if (count > 1) { + throw new IllegalStateException("Expected 0 or 1 Atom entries; found " + feed.entries.size()); + } + AtomEntry entry = count == 0 ? null : feed.entries.get(0); + load(entry); + return this; + } + + /** {@inheritDoc} */ + public Object remove(Object key) { + throw new UnsupportedOperationException(); + } + + /** + * Sets the local cache update value. Writing is deferred until + * {@code update} has been called. + * + * @param key The key to set. + * @param value The default value. 
+ */ + void setCacheValue(String key, Object value) { + toUpdate.put(key, value); + } + + /** {@inheritDoc} */ + public int size() { + return getContent().size(); + } + + /** + * Updates the entity with the values you previously set using the setter + * methods, and any additional specified arguments. The specified arguments + * take precedence over the values that were set using the setter methods. + * + * @param args The arguments to update. + */ + public void update(Map<String, Object> args) { + if (!toUpdate.isEmpty() || !args.isEmpty()) { + // Merge cached setters and live args together before updating. + Map<String, Object> mergedArgs = + new LinkedHashMap<>(); + mergedArgs.putAll(toUpdate); + mergedArgs.putAll(args); + + if (mergedArgs.containsKey("name") && !isNameChangeAllowed()) { + throw new IllegalStateException("Cannot set 'name' on an existing entity."); + } + + service.post(actionPath("edit"), mergedArgs); + toUpdate.clear(); + invalidate(); + } + } + + /** + * Updates the entity with the accumulated arguments, established by the + * individual setter methods for each specific entity class. + */ + @SuppressWarnings("unchecked") + public void update() { + update(Collections.EMPTY_MAP); + } + + + /** + * Updates the access control list (ACL) properties for this entity. + * + * @param args Properties to update for this entity. + * Required Properties in 'args' + * - `owner`: The Splunk username, such as "admin". A value of "nobody" means no specific user. + * - `sharing`: A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system". 
+ */ + public void aclUpdate(Map<String, Object> args){ + if(!args.containsKey("sharing")){ + throw new IllegalArgumentException("Required argument 'sharing' is missing."); + } + if(!args.containsKey("owner")){ + throw new IllegalArgumentException("Required argument 'owner' is missing."); + } + service.post(actionPath("acl"), args); + invalidate(); + } + + /** + * Removes this entity from its corresponding collection. + */ + public void remove() { + service.delete(actionPath("remove")); + } + + /** {@inheritDoc} */ + @Override public Entity validate() { + super.validate(); + return this; + } + + /** {@inheritDoc} */ + public Collection<Object> values() { + return getContent().values(); + } +} + diff --git a/splunk/src/main/java/com/splunk/Event.java b/splunk/src/main/java/com/splunk/Event.java index a37b91bc..4c31e535 100644 --- a/splunk/src/main/java/com/splunk/Event.java +++ b/splunk/src/main/java/com/splunk/Event.java @@ -1,214 +1,214 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Pattern; - -/** - * The {@code Event} class wraps an individual event or result that was returned - * by the {@link ResultsReader#getNextEvent} method. - * <p> - * An event maps each field name to a list of zero of more values. 
- * These values can be accessed as either an array (using the {@link #getArray} - * method) or as a delimited string (using the {@link #get} method). We - * recommend accessing values as an array when possible. - * <p> - * The delimiter for field values depends on the underlying result format. - * If the underlying format does not specify a delimiter, such as with the - * {@link ResultsReaderXml} class, the delimiter is a comma (,). - */ -public class Event extends HashMap<String, String> { - private Map<String, String[]> arrayValues = new HashMap<String, String[]>(); - private String segmentedRaw; - - // Prevent non-SDK instantiation. - Event() { - // nothing - } - - /** - * Sets the single value or delimited set of values for the specified - * field name. - * - * When setting a multi-valued field, use the - * {@link #putArray(String, String[])} method instead. - * - * @param key The field name. - * @param valueOrDelimitedValues The single values or delimited set of - * values. - */ - String putSingleOrDelimited(String key, String valueOrDelimitedValues) { - return super.put(key, valueOrDelimitedValues); - } - - /** - * Sets the values for the specified field name, with the assumption that - * the value delimiter is a comma (,). - * - * @param key The field name. - * @param values The delimited set of values. - */ - void putArray(String key, String[] values) { - arrayValues.put(key, values); - - // For backward compatibility with the Map interface - super.put(key, Util.join(",", values)); - } - - /** - * Sets the value for the XML element for the {@code _raw} field. This value - * is only used by the {@link ResultsReaderXml} class. - * @param value The text of the XML element. - */ - void putSegmentedRaw(String value) { - segmentedRaw = value; - } - - /** - * Returns the single value or delimited set of values for the specified - * field name, or {@code null} if the specified field is not present. 
- * - * When getting a multi-valued field, use the {@link #getArray(String)} or - * {@link #getArray(String, String)} method instead. - * - * @param key The field name. - * @return The single value or delimited set of values. - */ - public String get(String key) { - return super.get(key); - } - - /** - * Gets the values for the specified field name. - * <br><br> - * <b>Caution:</b> This variant of {@link #getArray(String, String)} is - * unsafe for {@link ResultsReader} implementations that require a - * delimiter. Therefore, this method should only be used for results that - * are returned by {@link ResultsReaderXml}. For other readers, use the - * {@link #getArray(String, String)} method instead. - * <br><br> - * If the underlying {@link ResultsReader} object has no delimiter, the - * original array of values is returned. If the object <i>does</i> have a - * delimiter, the single/delimited value is assumed to be a single value and - * is returned as a single-valued array. - * - * @param key The field name. - * @return The original array of values if there is no delimiter, or the - * single-valued array. - */ - public String[] getArray(String key) { - String[] arrayValue = arrayValues.get(key); - if (arrayValue != null) { - return arrayValue; - } - - String singleValue = super.get(key); - if (singleValue == null) { - return null; - } - return new String[] { singleValue }; - } - - /** - * Gets the values for the specified field name. - * - * The delimiter must be determined empirically based on the search - * string and the data format of the index. The delimiter can differ - * between fields in the same {@link Event} object. - * - * The delimiter is ignored for {@link ResultsReader} implementations - * that do not require a delimiter, such as {@link ResultsReaderXml}. - * - * If the underlying {@link ResultsReader} object has no delimiter, the - * original array of values is returned (and the specified delimiter is - * ignored). 
If the object <i>does</i> have a delimiter, the - * single/delimited value is split based on the specified delimiter and is - * returned as an array. - * - * @param key The field name. - * @param delimiter The delimiter. - * @return The original array of values if there is no delimiter, or the - * array of values split by delimiter. - */ - public String[] getArray(String key, String delimiter) { - String[] arrayValue = arrayValues.get(key); - if (arrayValue != null) { - return arrayValue; - } - - String delimitedValues = super.get(key); - if (delimitedValues == null) { - return null; - } - return delimitedValues.split(Pattern.quote(delimiter)); - } - - /** - * Gets the XML markup for the {@code "_raw"} field value. This value - * is only used by the {@link ResultsReaderXml} class. - * <p> - * The return value is different than that of {@code get("_raw")} - * in that this segmented raw value is an XML fragment that includes all - * markup such as XML tags and escaped characters. - * <p> - * For example, {@code get("_raw")} returns this: - * <p> - * {@code "http://localhost:8000/en-US/app/search/flashtimeline?q=search%20search%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"} - * <p> - * The {@code getSegmentedRaw} method returns this: - * <p> - * {@code <v xml:space="preserve" trunc="0">"http://localhost:8000/en-US/app/<sg h=\"1\">search</sg>/flashtimeline?q=<sg h=\"1\">search</sg>%20<sg h=\"1\">search</sg>%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"</v>} - * @return the segmented raw xml including tags and escaped characters. - */ - public String getSegmentedRaw() { - if (segmentedRaw == null) { - // ResultsReaderXml will always set this to not null. Using this - // method for other result reader is not supported. - throw new UnsupportedOperationException( - "The value is not available. 
Use ResultsReaderXml instead."); - } - return segmentedRaw; - } - // === Read Only === - - @Override - public void clear() { - throw new UnsupportedOperationException(); - } - - @Override - public Object clone() { - throw new UnsupportedOperationException(); - } - - @Override - public String put(String key, String value) { - throw new UnsupportedOperationException(); - } - - @Override - public void putAll(Map<? extends String, ? extends String> m) { - throw new UnsupportedOperationException(); - } - - @Override - public String remove(Object key) { - throw new UnsupportedOperationException(); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Pattern; + +/** + * The {@code Event} class wraps an individual event or result that was returned + * by the {@link ResultsReader#getNextEvent} method. + * <p> + * An event maps each field name to a list of zero of more values. + * These values can be accessed as either an array (using the {@link #getArray} + * method) or as a delimited string (using the {@link #get} method). We + * recommend accessing values as an array when possible. + * <p> + * The delimiter for field values depends on the underlying result format. + * If the underlying format does not specify a delimiter, such as with the + * {@link ResultsReaderXml} class, the delimiter is a comma (,). 
+ */ +public class Event extends HashMap<String, String> { + private Map<String, String[]> arrayValues = new HashMap<>(); + private String segmentedRaw; + + // Prevent non-SDK instantiation. + Event() { + // nothing + } + + /** + * Sets the single value or delimited set of values for the specified + * field name. + * + * When setting a multi-valued field, use the + * {@link #putArray(String, String[])} method instead. + * + * @param key The field name. + * @param valueOrDelimitedValues The single values or delimited set of + * values. + */ + String putSingleOrDelimited(String key, String valueOrDelimitedValues) { + return super.put(key, valueOrDelimitedValues); + } + + /** + * Sets the values for the specified field name, with the assumption that + * the value delimiter is a comma (,). + * + * @param key The field name. + * @param values The delimited set of values. + */ + void putArray(String key, String[] values) { + arrayValues.put(key, values); + + // For backward compatibility with the Map interface + super.put(key, Util.join(",", values)); + } + + /** + * Sets the value for the XML element for the {@code _raw} field. This value + * is only used by the {@link ResultsReaderXml} class. + * @param value The text of the XML element. + */ + void putSegmentedRaw(String value) { + segmentedRaw = value; + } + + /** + * Returns the single value or delimited set of values for the specified + * field name, or {@code null} if the specified field is not present. + * + * When getting a multi-valued field, use the {@link #getArray(String)} or + * {@link #getArray(String, String)} method instead. + * + * @param key The field name. + * @return The single value or delimited set of values. + */ + public String get(String key) { + return super.get(key); + } + + /** + * Gets the values for the specified field name. + * <br><br> + * <b>Caution:</b> This variant of {@link #getArray(String, String)} is + * unsafe for {@link ResultsReader} implementations that require a + * delimiter. 
Therefore, this method should only be used for results that + * are returned by {@link ResultsReaderXml}. For other readers, use the + * {@link #getArray(String, String)} method instead. + * <br><br> + * If the underlying {@link ResultsReader} object has no delimiter, the + * original array of values is returned. If the object <i>does</i> have a + * delimiter, the single/delimited value is assumed to be a single value and + * is returned as a single-valued array. + * + * @param key The field name. + * @return The original array of values if there is no delimiter, or the + * single-valued array. + */ + public String[] getArray(String key) { + String[] arrayValue = arrayValues.get(key); + if (arrayValue != null) { + return arrayValue; + } + + String singleValue = super.get(key); + if (singleValue == null) { + return null; + } + return new String[] { singleValue }; + } + + /** + * Gets the values for the specified field name. + * + * The delimiter must be determined empirically based on the search + * string and the data format of the index. The delimiter can differ + * between fields in the same {@link Event} object. + * + * The delimiter is ignored for {@link ResultsReader} implementations + * that do not require a delimiter, such as {@link ResultsReaderXml}. + * + * If the underlying {@link ResultsReader} object has no delimiter, the + * original array of values is returned (and the specified delimiter is + * ignored). If the object <i>does</i> have a delimiter, the + * single/delimited value is split based on the specified delimiter and is + * returned as an array. + * + * @param key The field name. + * @param delimiter The delimiter. + * @return The original array of values if there is no delimiter, or the + * array of values split by delimiter. 
+ */ + public String[] getArray(String key, String delimiter) { + String[] arrayValue = arrayValues.get(key); + if (arrayValue != null) { + return arrayValue; + } + + String delimitedValues = super.get(key); + if (delimitedValues == null) { + return null; + } + return delimitedValues.split(Pattern.quote(delimiter)); + } + + /** + * Gets the XML markup for the {@code "_raw"} field value. This value + * is only used by the {@link ResultsReaderXml} class. + * <p> + * The return value is different than that of {@code get("_raw")} + * in that this segmented raw value is an XML fragment that includes all + * markup such as XML tags and escaped characters. + * <p> + * For example, {@code get("_raw")} returns this: + * <p> + * {@code "http://localhost:8000/en-US/app/search/flashtimeline?q=search%20search%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"} + * <p> + * The {@code getSegmentedRaw} method returns this: + * <p> + * {@code <v xml:space="preserve" trunc="0">"http://localhost:8000/en-US/app/<sg h=\"1\">search</sg>/flashtimeline?q=<sg h=\"1\">search</sg>%20<sg h=\"1\">search</sg>%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"</v>} + * @return the segmented raw xml including tags and escaped characters. + */ + public String getSegmentedRaw() { + if (segmentedRaw == null) { + // ResultsReaderXml will always set this to not null. Using this + // method for other result reader is not supported. + throw new UnsupportedOperationException( + "The value is not available. Use ResultsReaderXml instead."); + } + return segmentedRaw; + } + // === Read Only === + + @Override + public void clear() { + throw new UnsupportedOperationException(); + } + + @Override + public Object clone() { + throw new UnsupportedOperationException(); + } + + @Override + public String put(String key, String value) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map<? extends String, ? 
extends String> m) { + throw new UnsupportedOperationException(); + } + + @Override + public String remove(Object key) { + throw new UnsupportedOperationException(); + } +} diff --git a/splunk/src/main/java/com/splunk/FieldType.java b/splunk/src/main/java/com/splunk/FieldType.java index 8f78a469..42d0264b 100644 --- a/splunk/src/main/java/com/splunk/FieldType.java +++ b/splunk/src/main/java/com/splunk/FieldType.java @@ -1,71 +1,71 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; - -/** - * Represents the type of a field in a data model object. 
- */ -public enum FieldType { - STRING { - public String toString() { return "string"; } - }, - NUMBER { - public String toString() { return "number"; } - }, - BOOLEAN { - public String toString() { return "boolean"; } - }, - IPV4 { - public String toString() { return "ipv4"; } - }, - TIMESTAMP { - public String toString() { return "timestamp"; } - }, - CHILDCOUNT { - public String toString() { return "childcount"; } - }, - OBJECTCOUNT { - public String toString() { return "objectcount"; } - }, - UNDEFINED { - public String toString() { - throw new UnsupportedOperationException("No serialization for undefined field type."); - } - }; - - private final static Map<String, FieldType> typeLookup = new HashMap<String, FieldType>() {{ - put("string", STRING); - put("number", NUMBER); - put("boolean", BOOLEAN); - put("ipv4", IPV4); - put("timestamp", TIMESTAMP); - put("childcount", CHILDCOUNT); - put("objectcount", OBJECTCOUNT); - }}; - - public static FieldType parseType(String text) { - FieldType result = typeLookup.get(text.toLowerCase()); - if (result == null) { - result = UNDEFINED; - } - return result; - } - -}; +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; + +/** + * Represents the type of a field in a data model object. 
+ */ +public enum FieldType { + STRING { + public String toString() { return "string"; } + }, + NUMBER { + public String toString() { return "number"; } + }, + BOOLEAN { + public String toString() { return "boolean"; } + }, + IPV4 { + public String toString() { return "ipv4"; } + }, + TIMESTAMP { + public String toString() { return "timestamp"; } + }, + CHILDCOUNT { + public String toString() { return "childcount"; } + }, + OBJECTCOUNT { + public String toString() { return "objectcount"; } + }, + UNDEFINED { + public String toString() { + throw new UnsupportedOperationException("No serialization for undefined field type."); + } + }; + + private final static Map<String, FieldType> typeLookup = new HashMap<>() {{ + put("string", STRING); + put("number", NUMBER); + put("boolean", BOOLEAN); + put("ipv4", IPV4); + put("timestamp", TIMESTAMP); + put("childcount", CHILDCOUNT); + put("objectcount", OBJECTCOUNT); + }}; + + public static FieldType parseType(String text) { + FieldType result = typeLookup.get(text.toLowerCase()); + if (result == null) { + result = UNDEFINED; + } + return result; + } + +}; diff --git a/splunk/src/main/java/com/splunk/FiredAlertGroup.java b/splunk/src/main/java/com/splunk/FiredAlertGroup.java index 7d6b0994..2864b250 100644 --- a/splunk/src/main/java/com/splunk/FiredAlertGroup.java +++ b/splunk/src/main/java/com/splunk/FiredAlertGroup.java @@ -1,43 +1,43 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; -/** - * The {@code FiredAlertGroup} class represents a group of fired alerts, which - * are the alerts for a given saved search. - */ -public class FiredAlertGroup extends Entity { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The fired alert endpoint. - */ - FiredAlertGroup(Service service, String path) { - super(service, path); - } - - /** - * Returns a group of fired alerts for a given saved search. - * - * @return The fired alerts in the group. - */ - public EntityCollection<FiredAlert> getAlerts() { - return new EntityCollection<FiredAlert>( - service, this.path, FiredAlert.class); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; +/** + * The {@code FiredAlertGroup} class represents a group of fired alerts, which + * are the alerts for a given saved search. + */ +public class FiredAlertGroup extends Entity { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The fired alert endpoint. + */ + FiredAlertGroup(Service service, String path) { + super(service, path); + } + + /** + * Returns a group of fired alerts for a given saved search. + * + * @return The fired alerts in the group. 
+ */ + public EntityCollection<FiredAlert> getAlerts() { + return new EntityCollection<>( + service, this.path, FiredAlert.class); + } +} diff --git a/splunk/src/main/java/com/splunk/HttpService.java b/splunk/src/main/java/com/splunk/HttpService.java index 8b1cb017..4adc7bee 100644 --- a/splunk/src/main/java/com/splunk/HttpService.java +++ b/splunk/src/main/java/com/splunk/HttpService.java @@ -47,7 +47,7 @@ public class HttpService { private static SSLSocketFactory sslSocketFactory = createSSLFactory(); private static String HTTPS_SCHEME = "https"; private static String HTTP_SCHEME = "http"; - private static List<String> VALID_HOSTS = new ArrayList<String>(Arrays.asList("localhost", "127.0.0.1", "::1")); + private static List<String> VALID_HOSTS = new ArrayList<>(Arrays.asList("localhost", "127.0.0.1", "::1")); private static final HostnameVerifier HOSTNAME_VERIFIER = new HostnameVerifier() { public boolean verify(String s, SSLSession sslSession) { @@ -85,8 +85,8 @@ public boolean verify(String s, SSLSession sslSession) { private String prefix = null; - static Map<String, String> defaultHeader = new HashMap<String, String>() {{ - put("User-Agent", "splunk-sdk-java/1.9.4"); + static Map<String, String> defaultHeader = new HashMap<>() {{ + put("User-Agent", "splunk-sdk-java/1.9.5"); put("Accept", "*/*"); }}; @@ -449,9 +449,9 @@ public ResponseMessage send(String path, RequestMessage request) { } catch (IOException e) { throw new RuntimeException(e.getMessage(), e); } - if (cn instanceof HttpsURLConnection) { - ((HttpsURLConnection) cn).setSSLSocketFactory(sslSocketFactory); - ((HttpsURLConnection) cn).setHostnameVerifier(HOSTNAME_VERIFIER); + if (cn instanceof HttpsURLConnection cnInst) { + cnInst.setSSLSocketFactory(sslSocketFactory); + cnInst.setHostnameVerifier(HOSTNAME_VERIFIER); } cn.setUseCaches(false); cn.setAllowUserInteraction(false); diff --git a/splunk/src/main/java/com/splunk/Index.java b/splunk/src/main/java/com/splunk/Index.java index f39dd2e7..4665098a 
100644 --- a/splunk/src/main/java/com/splunk/Index.java +++ b/splunk/src/main/java/com/splunk/Index.java @@ -1,1118 +1,1112 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.Socket; -import java.util.Date; - -/** - * The {@code Index} class represents an index. - */ -public class Index extends Entity { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The index endpoint. - */ - Index(Service service, String path) { - super(service, path); - } - - /** - * Creates a writable socket to this index. - * - * @return The writable socket. - * @throws IOException Throws exception if fails to write socket. - */ - public Socket attach() throws IOException { - Receiver receiver = service.getReceiver(); - return receiver.attach(getName()); - } - - /** - * Writes events to this index, reusing the connection. - * This method passes an output stream connected to the index to the - * {@code run} method of the {@code ReceiverBehavior} object, then handles - * setting up and tearing down the socket. - * <p> - * For an example of how to use this method, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to - * get data into Splunk</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. 
- * - * @param behavior The body of a {@code try} block as an anonymous - * implementation of the {@code ReceiverBehavior} interface. - * @throws IOException The IOException class - */ - public void attachWith(ReceiverBehavior behavior) throws IOException { - Socket socket = null; - OutputStream output = null; - try { - socket = attach(); - output = socket.getOutputStream(); - behavior.run(output); - output.flush(); - } finally { - if (output != null) { output.close(); } - if (socket != null) { socket.close(); } - } - } - - /** - * Creates a writable socket to this index. - * - * @param args Optional arguments for this stream. Valid parameters are: - * "host", "host_regex", "source", and "sourcetype". - * @return The socket. - * @throws IOException The IOException class - */ - public Socket attach(Args args) throws IOException { - Receiver receiver = service.getReceiver(); - return receiver.attach(getName(), args); - } - - /** - * Cleans this index, which removes all events from it. - * - * @param maxSeconds The maximum number of seconds to wait before returning. - * A value of -1 means to wait forever. - * @throws SplunkException If cleaning timed out or - * if the thread was interrupted. - * @return This index. 
- */ - public Index clean(int maxSeconds) { - Args saved = new Args(); - saved.put("maxTotalDataSizeMB", getMaxTotalDataSizeMB()); - saved.put("frozenTimePeriodInSecs", getFrozenTimePeriodInSecs()); - try { - Args reset = new Args(); - reset.put("maxTotalDataSizeMB", "1"); - reset.put("frozenTimePeriodInSecs", "1"); - update(reset); - rollHotBuckets(); - - long startTime = System.currentTimeMillis(); - long endTime = startTime + (maxSeconds * 1000); - while (true) { - long timeLeft = endTime - System.currentTimeMillis(); - if (timeLeft <= 0) { - break; - } - Thread.sleep(Math.min(1000, timeLeft)); - - if (this.getTotalEventCount() == 0) { - return this; - } - refresh(); - } - - throw new SplunkException(SplunkException.TIMEOUT, - "Index cleaning timed out"); - } - catch (InterruptedException e) - { - SplunkException f = new SplunkException( - SplunkException.INTERRUPTED, - "Index cleaning interrupted."); - f.initCause(e); - throw f; - } - finally { - update(saved); - } - } - - /** - * Indicates whether the data retrieved from this index has been - * UTF8-encoded. - * - * @return {@code true} if the retrieved data is in UTF8, {@code false} if - * not. - */ - public boolean getAssureUTF8() { - return getBoolean("assureUTF8"); - } - - /** - * Returns the total size of all bloom filter files. - * - * @return The total size of all bloom filter files, in KB. - */ - public int getBloomfilterTotalSizeKB() { - return getInteger("bloomfilterTotalSizeKB", 0); - } - - /** - * Returns the suggested size of the .tsidx file for the bucket rebuild - * process. - * Valid values are: "auto", a positive integer, or a positive - * integer followed by "KB", "MB", or "GB". - * - * @return The suggested size of the .tsidx file for the bucket rebuild - * process. - */ - public String getBucketRebuildMemoryHint() { - return getString("bucketRebuildMemoryHint"); - } - - /** - * Returns the absolute file path to the cold database for this index. 
- * This value may contain shell expansion terms. - * - * @return The absolute file path to the cold database, or {@code null} if - * not specified. - */ - public String getColdPath() { - return getString("coldPath", null); - } - - /** - * Returns the expanded absolute file path to the cold database for this - * index. - * - * @return The expanded absolute file path to the cold database, or - * {@code null} if not specified. - */ - public String getColdPathExpanded() { - return getString("coldPath_expanded", null); - } - - /** - * Returns the frozen archive destination path for this index. - * - * @return The frozen archive destination path, or {@code null} if not - * specified. - */ - public String getColdToFrozenDir() { - return getString("coldToFrozenDir", null); - } - - /** - * Returns the path to the archiving script. - * <p>For more info about archiving scripts, see the - * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" - * target="_blank">POST data/indexes endpoint</a> in the REST API - * documentation. - * @see #getColdToFrozenDir - * - * @return The archiving script, or {@code null} if not specified. - */ - public String getColdToFrozenScript() { - return getString("coldToFrozenScript", null); - } - - /** - * Indicates whether raw data is compressed. - * - * @deprecated Splunk always compresses raw data. - * @return {@code true} if raw data is compressed, {@code false} if not. - */ - public boolean getCompressRawdata() { - return getBoolean("compressRawdata"); - } - - /** - * Returns the current size of this index. - * - * @return The current size of the index, in MB. - */ - public int getCurrentDBSizeMB() { - return getInteger("currentDBSizeMB"); - } - - /** - * Return the default index name of the Splunk instance. - * - * @return The default index name. 
- */ - public String getDefaultDatabase() { - return getString("defaultDatabase"); - } - - /** - * Returns whether asynchronous "online fsck" bucket repair is enabled. - * <p> - * When this feature is enabled, you don't have to wait for buckets to be - * repaired before starting Splunk, but you might notice a slight - * degradation in performance as a result. - * @return {@code true} if bucket repair is enabled, {@code false} if - * not. - */ - public boolean getEnableOnlineBucketRepair() { - - return getBoolean("enableOnlineBucketRepair"); - } - - /** - * Indicates whether real-time search is enabled for this index. - * - * @return {@code true} if real-time search is enabled, {@code false} if - * not. - */ - public boolean getEnableRealtimeSearch() { - return getBoolean("enableRealtimeSearch"); - } - - /** - * Returns the maximum age for a bucket, after which the data in this index - * rolls to frozen. If archiving is necessary for frozen data, see the - * {@code coldToFrozen} attributes. - * - * @return The maximum age, in seconds, after which data rolls to frozen. - */ - public int getFrozenTimePeriodInSecs() { - return getInteger("frozenTimePeriodInSecs"); - } - - /** - * Returns the absolute path to both hot and warm buckets for this index. - * This value may contain shell expansion terms. - * - * @return This index's absolute path to both hot and warm buckets, or - * {@code null} if not specified. - */ - public String getHomePath() { - return getString("homePath", null); - } - - /** - * Returns the expanded absolute path to both hot and warm buckets for this - * index. - * - * @return The expanded absolute path to both hot and warm buckets, or - * {@code null} if not specified. - */ - public String getHomePathExpanded() { - return getString("homePath_expanded", null); - } - - /** - * Returns the index thread for this index. - * - * @return The index thread. 
- */ - public String getIndexThreads() { - return getString("indexThreads"); - } - - /** - * Returns the last initialization time for this index. - * - * @return The last initialization time, or {@code null} if not specified. - */ - public String getLastInitTime() { - return getString("lastInitTime", null); - } - - /** - * Returns the time that indicates a bucket age. When a warm or cold bucket - * is older than this, Splunk does not create or rebuild its bloomfilter. - * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", - * or "d"). For example, "30d" for 30 days. - * @return String value - */ - public String getMaxBloomBackfillBucketAge() { - return getString("maxBloomBackfillBucketAge", null); - } - - /** - * Returns the maximum number of concurrent optimize processes that - * can run against a hot bucket for this index. - * - * @return The maximum number of concurrent optimize processes. - */ - public int getMaxConcurrentOptimizes() { - return getInteger("maxConcurrentOptimizes"); - } - - /** - * Returns the maximum data size before triggering a roll from hot to warm - * buckets for this index. - * - * @return The maximum data size, in MB, or "auto" (which means 750MB), or - * "auto_high_volume" (which means 10GB on a 64-bit system, or 1GB on a - * 32-bit system). - * @see #setMaxDataSize - */ - public String getMaxDataSize() { - return getString("maxDataSize"); - } - - /** - * Returns the maximum number of hot buckets that can exist for this index. - * - * @return The maximum number of hot buckets or "auto" (which means 3). - */ - public String getMaxHotBuckets() { - return getString("maxHotBuckets"); - } - - /** - * Returns the maximum lifetime of a hot bucket for this index. - * If a hot bucket exceeds this value, Splunk rolls it to warm. - * A value of 0 means an infinite lifetime. - * - * @return The hot bucket's maximum lifetime, in seconds. 
- */ - public int getMaxHotIdleSecs() { - return getInteger("maxHotIdleSecs"); - } - - /** - * Returns the upper bound of the target maximum timespan of - * hot and warm buckets for this index. - * - * @return The upper bound of the target maximum timespan, in seconds. - */ - public int getMaxHotSpanSecs() { - return getInteger("maxHotSpanSecs"); - } - - /** - * Returns the amount of memory to allocate for buffering - * a single .tsidx file into memory before flushing to disk. - * - * @return The amount of memory, in MB. - */ - public int getMaxMemMB() { - return getInteger("maxMemMB"); - } - - /** - * Returns the maximum number of unique lines that are allowed - * in a bucket's .data files for this index. A value of 0 means infinite - * lines. - * - * @return The maximum number of unique lines. - */ - public int getMaxMetaEntries() { - return getInteger("maxMetaEntries"); - } - - /** - * Returns the maximum number of concurrent helper processes for this index. - * - * @return The maximum number of concurrent helper processes. - */ - public int getMaxRunningProcessGroups() { - return getInteger("maxRunningProcessGroups", 0); - } - - /** - * Returns the maximum time attribute for this index. - * - * @return The maximum time attribute, or {@code null} if not specified. - */ - public Date getMaxTime() { - return getDate("maxTime", null); - } - - /** - * Returns the maximum size of this index. If an index - * grows larger than this value, the oldest data is frozen. - * - * @return The maximum index size, in MB. - */ - public int getMaxTotalDataSizeMB() { - return getInteger("maxTotalDataSizeMB"); - } - - /** - * Returns the upper limit, in seconds, for how long an event can sit in a - * raw slice. This value applies only when replication is enabled for this - * index, and is ignored otherwise.<br> - * If there are any acknowledged events sharing this raw slice, the - * {@code MaxTimeUnreplicatedWithAcksparamater} applies instead. 
- * @see #getMaxTimeUnreplicatedWithAcks - * @return int value - */ - public int getMaxTimeUnreplicatedNoAcks() { - return getInteger("maxTimeUnreplicatedNoAcks"); - } - - /** - * Returns the upper limit, in seconds, for how long an event can sit - * unacknowledged in a raw slice. This value only applies when indexer - * acknowledgement is enabled on forwarders and replication is enabled with - * clustering. - * @return int value - */ - public int getMaxTimeUnreplicatedWithAcks() { - return getInteger("maxTimeUnreplicatedWithAcks"); - } - - /** - * Returns the maximum number of warm buckets for this index. If this - * value is exceeded, the warm buckets with the lowest value for their - * latest times are moved to cold. - * - * @return The maximum number of warm buckets. - */ - public int getMaxWarmDBCount() { - return getInteger("maxWarmDBCount"); - } - - /** - * Returns the memory pool for this index. - * - * @return The memory pool, in MB or "auto". - */ - public String getMemPoolMB() { - return getString("memPoolMB"); - } - - /** - * Returns the frequency at which Splunkd forces a filesystem sync while - * compressing journal slices for this index. - * <p> - * A value of "disable" disables this feature completely, while a value of 0 - * forces a file-system sync after completing compression of every journal - * slice. - * - * @return The file-system sync frequency, as an integer or "disable". - */ - public String getMinRawFileSyncSecs() { - return getString("minRawFileSyncSecs"); - } - - /** - * Returns the minimum time attribute for this index. - * - * @return The minimum time attribute, or {@code null} if not specified. - */ - public Date getMinTime() { - return getDate("minTime", null); - } - - /** - * Returns the number of hot buckets that were created for this index. - * - * @return The number of hot buckets. 
- */ - public int getNumHotBuckets() { - return getInteger("numHotBuckets", 0); - } - - /** - * Returns the number of warm buckets created for this index. - * - * @return The number of warm buckets. - */ - public int getNumWarmBuckets() { - return getInteger("numWarmBuckets", 0); - } - - /** - * Returns the number of bloom filters created for this index. - * - * @return The number of bloom filters. - */ - public int getNumBloomfilters() { - return getInteger("numBloomfilters", 0); - } - - /** - * Returns the frequency at which metadata is for partially synced (synced - * in-place) for this index. A value of 0 disables partial syncing, so - * metadata is only synced on the {@code ServiceMetaPeriod} interval. - * @see #getServiceMetaPeriod - * @see #setServiceMetaPeriod - * - * @return The metadata sync interval, in seconds. - */ - public int getPartialServiceMetaPeriod() { - return getInteger("partialServiceMetaPeriod"); - } - - /** - * Returns the future event-time quarantine for this index. Events - * that are newer than now plus this value are quarantined. - * - * @return The future event-time quarantine, in seconds. - */ - public int getQuarantineFutureSecs() { - return getInteger("quarantineFutureSecs"); - } - - /** - * Returns the past event-time quarantine for this index. Events - * that are older than now minus this value are quarantined. - * - * @return The past event-time quarantine, in seconds. - */ - public int getQuarantinePastSecs() { - return getInteger("quarantinePastSecs"); - } - - /** - * Returns the target uncompressed size of individual raw slices in the - * rawdata journal for this index. - * - * @return The target uncompressed size, in bytes. - */ - public int getRawChunkSizeBytes() { - return getInteger("rawChunkSizeBytes"); - } - - /** - * Returns the frequency to check for the need to create a new hot bucket - * and the need to roll or freeze any warm or cold buckets for this index. - * - * @return The check frequency, in seconds. 
- */ - public int getRotatePeriodInSecs() { - return getInteger("rotatePeriodInSecs"); - } - - /** - * Returns the frequency at which metadata is synced to disk for this index. - * - * @return The meta data sync frequency, in seconds. - */ - public int getServiceMetaPeriod() { - return getInteger("serviceMetaPeriod"); - } - - /** - * Returns a list of indexes that suppress "index missing" messages. - * - * @return A comma-separated list of indexes. - */ - public String getSuppressBannerList() { - return getString("suppressBannerList", null); - } - - /** - * Returns the number of events that trigger the indexer to sync events. - * This value is global, not a per-index value. - * - * @return The number of events that trigger the indexer to sync events. - */ - public int getSync() { - return getInteger("sync"); - } - - /** - * Indicates whether the sync operation is called before the file - * descriptor is closed on metadata updates. - * - * @return {@code true} if the sync operation is called before the file - * descriptor is closed on metadata updates, {@code false} if not. - */ - public boolean getSyncMeta() { - return getBoolean("syncMeta"); - } - - /** - * Returns the absolute path to the thawed index for this index. This value - * may contain shell expansion terms. - * - * @return The absolute path to the thawed index, or {@code null} if not - * specified. - */ - public String getThawedPath() { - return getString("thawedPath", null); - } - - /** - * Returns the expanded absolute path to the thawed index for this index. - * - * @return The expanded absolute path to the thawed index, or {@code null} - * if not specified. - */ - public String getThawedPathExpanded() { - return getString("thawedPath_expanded", null); - } - - /** - * Returns the frequency at which Splunk checks for an index throttling - * condition. - * - * @return The frequency of the throttling check, in seconds. 
- */ - public int getThrottleCheckPeriod() { - return getInteger("throttleCheckPeriod"); - } - - /** - * Returns the total event count for this index. - * - * @return The total event count. - */ - public int getTotalEventCount() { - return getInteger("totalEventCount"); - } - - /** - * Indicates whether this index is an internal index. - * - * @return {@code true} if this index is an internal index, {@code false} - * if not. - */ - public boolean isInternal() { - return getBoolean("isInternal"); - } - - /** - * Performs rolling hot buckets for this index. - */ - public void rollHotBuckets() { - ResponseMessage response = service.post(path + "/roll-hot-buckets"); - assert(response.getStatus() == 200); - } - - /** - * Sets whether the data retrieved from this index is UTF8-encoded. - * <p> - * <b>Note:</b> Indexing performance degrades when this parameter is set to - * {@code true}. - * - * In Splunk 5.0 and later, this is a global property and cannot be set on - * a per-index basis. - * - * @param assure {@code true} to ensure UTF8 encoding, {@code false} if not. - */ - public void setAssureUTF8(boolean assure) { - setCacheValue("assureUTF8", assure); - } - - /** - * Sets the number of events that make up a block for block signatures. A - * value of 100 is recommended. A value of 0 disables block signing for this - * index. - * - * @param value The event count for block signing. - */ - public void setBlockSignSize(int value) { - setCacheValue("blockSignSize", value); - } - - - /** - * Sets the suggested size of the .tsidx file for the bucket rebuild - * process. - * - * Valid values are: "auto", a positive integer, or a positive - * integer followed by "KB", "MB", or "GB". - * - * @param value The suggested size of the .tsidx file for the bucket rebuild - * process. 
- */ - public void setBucketRebuildMemoryHint(String value) { - setCacheValue("bucketRebuildMemoryHint", value); - } - - /** - * Sets the destination path for the frozen archive, where Splunk - * automatically puts frozen buckets. The bucket freezing policy is as - * follows: - * <ul><li><b>New-style buckets (4.2 and later):</b> All files are removed - * except the raw data. To thaw frozen buckets, run {@code Splunk rebuild - * <bucket dir>} on the bucket, then move the buckets to the thawed - * directory.</li> - * <li><b>Old-style buckets (4.1 and earlier):</b> gzip all the .data and - * .tsidx files. To thaw frozen buckets, gunzip the zipped files and move - * the buckets to the thawed directory.</li></ul> - * If both {@code coldToFrozenDir} and {@code coldToFrozenScript} are - * specified, {@code coldToFrozenDir} takes precedence. - * @see #setColdToFrozenScript - * @see #getColdToFrozenScript - * - * @param destination The destination path for the frozen archive. - */ - public void setColdToFrozenDir(String destination) { - setCacheValue("coldToFrozenDir", destination); - } - - /** - * Sets the path to the archiving script. - * <p>For more info about archiving scripts, see the - * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" - * target="_blank">POST data/indexes endpoint</a> in the REST API - * documentation. - * @see #setColdToFrozenDir - * @see #getColdToFrozenDir - * - * @param script The path to the archiving script. - */ - public void setColdToFrozenScript(String script) { - setCacheValue("coldToFrozenScript", script); - } - - /** - * Sets whether asynchronous "online fsck" bucket repair is enabled. - * <p> - * When this feature is enabled, you don't have to wait for buckets to be - * repaired before starting Splunk, but you might notice a slight - * degradation in performance as a result. - * - * @param value {@code true} to enable online bucket repair, {@code false} - * if not. 
- */ - public void setEnableOnlineBucketRepair(boolean value) { - setCacheValue("enableOnlineBucketRepair", value); - } - - /** - * Sets the maximum age for a bucket, after which the data in this index - * rolls to frozen. Freezing data removes it from the index. To archive - * data, see {@code coldToFrozenDir} and {@code coldToFrozenScript}. - * @see #setColdToFrozenDir - * @see #setColdToFrozenScript - * - * @param seconds The time, in seconds, after which indexed data rolls to - * frozen. - */ - public void setFrozenTimePeriodInSecs(int seconds) { - setCacheValue("frozenTimePeriodInSecs", seconds); - } - - /** - * Sets the time that indicates a bucket age. When a warm or cold bucket - * is older than this, Splunk does not create or rebuild its bloomfilter. - * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", - * or "d"). For example, "30d" for 30 days. - * @param time The time that indicates a bucket age. - */ - public void setMaxBloomBackfillBucketAge(String time) { - setCacheValue("maxBloomBackfillBucketAge", time); - } - - /** - * Sets the number of concurrent optimize processes that can run against - * a hot bucket for this index. - * - * @param processes The number of concurrent optimize processes. - */ - public void setMaxConcurrentOptimizes(int processes) { - setCacheValue("maxConcurrentOptimizes", processes); - } - - /** - * Sets the maximum data size before triggering a roll from hot to warm - * buckets for this index. You can also specify a value to let Splunk - * autotune this parameter: use "auto_high_volume" for high-volume indexes - * (such as the main index, or one that gets over 10GB of data per day); - * otherwise, use "auto". - * @see #getMaxDataSize - * - * @param size The size in MB, or an autotune string. - */ - public void setMaxDataSize(String size) { - setCacheValue("maxDataSize", size); - } - - /** - * Sets the maximum number of hot buckets that can exist per index. 
- * <p> - * When {@code maxHotBuckets} is exceeded, Splunk rolls the least recently - * used (LRU) hot bucket to warm. Both normal hot buckets and quarantined - * hot buckets count towards this total. This setting operates independently - * of {@code MaxHotIdleSecs}, which can also cause hot buckets to roll. - * @see #setMaxHotIdleSecs - * @see #getMaxHotIdleSecs - * - * @param size The maximum number of hot buckets per index, or an 'auto' string. - */ - public void setMaxHotBuckets(String size) { - setCacheValue("maxHotBuckets", size); - } - - /** - * Sets the maximum lifetime of a hot bucket for this index. - * <p> - * If a hot bucket exceeds this value, Splunk rolls it to warm. - * This setting operates independently of {@code MaxHotBuckets}, which can - * also cause hot buckets to roll. - * @see #setMaxHotBuckets - * @see #getMaxHotBuckets - * - * @param seconds The hot bucket's maximum lifetime, in seconds. A value of - * 0 means an infinite lifetime. - */ - public void setMaxHotIdleSecs(int seconds) { - setCacheValue("maxHotIdleSecs", seconds); - } - - /** - * Sets the upper bound of the target maximum timespan of hot and warm - * buckets for this index. - * <p> - * <b>Note:</b> If you set this too small, you can get an explosion of - * hot and warm buckets in the file system. The system sets a lower bound - * implicitly for this parameter at 3600, but this advanced parameter should - * be set with care and understanding of the characteristics of your data. - * - * @param seconds The upper bound of the target maximum timespan, in - * seconds. - */ - public void setMaxHotSpanSecs(int seconds) { - setCacheValue("maxHotSpanSecs", seconds); - } - - /** - * Sets the amount of memory allocated for buffering a single .tsidx - * file before flushing to disk. - * - * @param memory The amount of memory, in MB. 
- */ - public void setMaxMemMB(int memory) { - setCacheValue("maxMemMB", memory); - } - - /** - * Sets the maximum number of unique lines in .data files in a bucket, which - * may help to reduce memory consumption. - * <p> - * If this value is exceeded, a hot bucket is rolled to prevent a further - * increase. If your buckets are rolling due to Strings.data hitting this - * limit, the culprit might be the "punct" field in your data. If you don't - * use that field, it might be better to just disable this (see the - * props.conf.spec in $SPLUNK_HOME/etc/system/README). - * - * @param entries The maximum number of unique lines. A value of 0 means - * infinite lines. - */ - public void setMaxMetaEntries(int entries) { - setCacheValue("maxMetaEntries", entries); - } - - - /** - * Sets the upper limit for how long an event can sit in a - * raw slice. This value applies only when replication is enabled for this - * index, and is ignored otherwise.<br> - * If there are any acknowledged events sharing this raw slice, the - * {@code MaxTimeUnreplicatedWithAcksparamater} applies instead. - * - * @param value The upper limit, in seconds. A value of 0 disables this - * setting. - */ - public void setMaxTimeUnreplicatedNoAcks(int value) { - setCacheValue("maxTimeUnreplicatedNoAcks", value); - } - - /** - * Sets the upper limit for how long an event can sit unacknowledged in a - * raw slice. This value only applies when indexer acknowledgement is - * enabled on forwarders and replication is enabled with clustering. - * <p> - * This number should not exceed the acknowledgement timeout configured on - * any forwarder. - * - * @param value The upper limit, in seconds. A value of 0 disables this - * setting (not recommended). - */ - public void setMaxTimeUnreplicatedWithAcks(int value) { - setCacheValue("maxTimeUnreplicatedWithAcks", value); - } - - /** - * Sets the maximum size for this index. If an index grows larger than this - * value, the oldest data is frozen. 
- * - * @param size The maximum index size, in MB. - */ - public void setMaxTotalDataSizeMB(int size) { - setCacheValue("maxTotalDataSizeMB", size); - } - - /** - * Sets the maximum number of warm buckets. If this number is exceeded, - * the warm buckets with the lowest value for their latest times will be - * moved to cold. - * - * @param buckets The maximum number of warm buckets. - */ - public void setMaxWarmDBCount(int buckets) { - setCacheValue("maxWarmDBCount", buckets); - } - - /** - * Sets the frequency at which Splunkd forces a file system sync while - * compressing journal slices for this index. A value of "disable" disables - * this feature completely, while a value of 0 forces a file-system sync - * after completing compression of every journal slice. - * - * @param frequency The file-system sync frequency, as an integer or - * "disable". - */ - public void setMinRawFileSyncSecs(String frequency) { - setCacheValue("minRawFileSyncSecs", frequency); - } - - /** - * Sets the frequency at which metadata is for partially synced (synced - * in-place) for this index. A value of 0 disables partial syncing, so - * metadata is only synced on the {@code ServiceMetaPeriod} interval. - * @see #setServiceMetaPeriod - * @see #getServiceMetaPeriod - * - * @param frequency The metadata sync interval, in seconds. - */ - public void setPartialServiceMetaPeriod(int frequency) { - setCacheValue("partialServiceMetaPeriod", frequency); - } - - /** - * Sets a quarantine for events that are timestamped in the future to help - * prevent main hot buckets from being polluted with fringe events. Events - * that are newer than "now" plus this value are quarantined. - * - * @param window The future event-time quarantine, in seconds. 
- */ - public void setQuarantineFutureSecs(int window) { - setCacheValue("quarantineFutureSecs", window); - } - - /** - * Sets a quarantine for events that are timestamped in the past to help - * prevent main hot buckets from being polluted with fringe events. Events - * that are older than "now" plus this value are quarantined. - * - * @param window The past event-time quarantine, in seconds. - */ - public void setQuarantinePastSecs(int window) { - setCacheValue("quarantinePastSecs", window); - } - - /** - * Sets the target uncompressed size of individual raw slices in the rawdata - * journal for this index. - * <p> - * This parameter only specifies a target chunk size. The actual chunk size - * might be slightly larger by an amount proportional to an individual event - * size. - * <blockquote> - * <b>WARNING:</b> This is an advanced parameter. Only change it if you are - * instructed to do so by Splunk Support. - * </blockquote> - * @param size The target uncompressed size, in bytes. (0 is not a valid - * value--if 0 is used, this parameter is set to the default value.) - */ - public void setRawChunkSizeBytes(int size) { - setCacheValue("rawChunkSizeBytes", size); - } - - /** - * Sets the frequency to check for the need to create a new hot bucket and - * the need to roll or freeze any warm or cold buckets for this index. - * - * @param frequency The check frequency, in seconds. - */ - public void setRotatePeriodInSecs(int frequency) { - setCacheValue("rotatePeriodInSecs", frequency); - } - - /** - * Sets the frequency at which metadata is synced to disk for this index. - * - * @param frequency The meta data sync frequency, in seconds. - */ - public void setServiceMetaPeriod(int frequency) { - setCacheValue("serviceMetaPeriod", frequency); - } - - /** - * Sets whether the sync operation is called before the file descriptor is - * closed on metadata updates. 
- * <p> - * This functionality improves the integrity of metadata files, especially - * with regard to operating system crashes and machine failures. - * <blockquote> - * <b>WARNING:</b> This is an advanced parameter. Only change it if you are - * instructed to do so by Splunk Support. - * </blockquote> - * @param sync {@code true} to call the sync operation before the file - * descriptor is closed on metadata updates, {@code false} if not. - */ - public void setSyncMeta(boolean sync) { - setCacheValue("syncMeta", sync); - } - - /** - * Sets the frequency at which Splunk checks for an index throttling - * condition. - * - * @param frequency The frequency of the throttling check, in seconds. - */ - public void setThrottleCheckPeriod(int frequency) { - setCacheValue("throttleCheckPeriod", frequency); - } - - /** - * Submits an event to this index through an HTTP POST request. - * - * @param data The event data to post. - */ - public void submit(String data) { - Receiver receiver = service.getReceiver(); - receiver.submit(getName(), data); - } - - /** - * Submits an event to this index through an HTTP POST request. - * - * @param args Optional arguments for this request. Valid parameters are: - * "host", "host_regex", "source", and "sourcetype". - * @param data The event data to post. - */ - public void submit(Args args, String data) { - Receiver receiver = service.getReceiver(); - receiver.submit(getName(), args, data); - } - - /** - * Uploads a file to this index as an event stream. - * <p> - * <b>Note:</b> This file must be directly accessible by the Splunk server. - * - * @param filename The path and filename. - */ - public void upload(String filename) { - EntityCollection<Upload> uploads = service.getUploads(); - Args args = new Args("index", getName()); - uploads.create(filename, args); - } - - /** - * Uploads a file to this index as an event stream. - * <p> - * <b>Note:</b> This file must be directly accessible by the Splunk server. 
- * - * @param filename The path and filename. - * - * @param args Optional arguments for this request. Valid parameters are: - * "host", "sourcetype", "rename-source". More found at: - * http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTinput#data.2Finputs.2Foneshot - * - */ - public void upload(String filename, Args args) { - EntityCollection<Upload> uploads = service.getUploads(); - if(args.containsKey("index")){ - throw new IllegalArgumentException("The 'index' parameter cannot be passed to an index's oneshot upload."); - } - args.add("index", getName()); - uploads.create(filename, args); - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.Socket; +import java.util.Date; + +/** + * The {@code Index} class represents an index. + */ +public class Index extends Entity { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The index endpoint. + */ + Index(Service service, String path) { + super(service, path); + } + + /** + * Creates a writable socket to this index. + * + * @return The writable socket. + * @throws IOException Throws exception if fails to write socket. 
+ */ + public Socket attach() throws IOException { + Receiver receiver = service.getReceiver(); + return receiver.attach(getName()); + } + + /** + * Writes events to this index, reusing the connection. + * This method passes an output stream connected to the index to the + * {@code run} method of the {@code ReceiverBehavior} object, then handles + * setting up and tearing down the socket. + * <p> + * For an example of how to use this method, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to + * get data into Splunk</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * + * @param behavior The body of a {@code try} block as an anonymous + * implementation of the {@code ReceiverBehavior} interface. + * @throws IOException The IOException class + */ + public void attachWith(ReceiverBehavior behavior) throws IOException { + try (Socket socket = attach(); + OutputStream output = socket.getOutputStream();) { + behavior.run(output); + output.flush(); + } + } + + /** + * Creates a writable socket to this index. + * + * @param args Optional arguments for this stream. Valid parameters are: + * "host", "host_regex", "source", and "sourcetype". + * @return The socket. + * @throws IOException The IOException class + */ + public Socket attach(Args args) throws IOException { + Receiver receiver = service.getReceiver(); + return receiver.attach(getName(), args); + } + + /** + * Cleans this index, which removes all events from it. + * + * @param maxSeconds The maximum number of seconds to wait before returning. + * A value of -1 means to wait forever. + * @throws SplunkException If cleaning timed out or + * if the thread was interrupted. + * @return This index. 
+ */ + public Index clean(int maxSeconds) { + Args saved = new Args(); + saved.put("maxTotalDataSizeMB", getMaxTotalDataSizeMB()); + saved.put("frozenTimePeriodInSecs", getFrozenTimePeriodInSecs()); + try { + Args reset = new Args(); + reset.put("maxTotalDataSizeMB", "1"); + reset.put("frozenTimePeriodInSecs", "1"); + update(reset); + rollHotBuckets(); + + long startTime = System.currentTimeMillis(); + long endTime = startTime + (maxSeconds * 1000); + while (true) { + long timeLeft = endTime - System.currentTimeMillis(); + if (timeLeft <= 0) { + break; + } + Thread.sleep(Math.min(1000, timeLeft)); + + if (this.getTotalEventCount() == 0) { + return this; + } + refresh(); + } + + throw new SplunkException(SplunkException.TIMEOUT, + "Index cleaning timed out"); + } + catch (InterruptedException e) + { + SplunkException f = new SplunkException( + SplunkException.INTERRUPTED, + "Index cleaning interrupted."); + f.initCause(e); + throw f; + } + finally { + update(saved); + } + } + + /** + * Indicates whether the data retrieved from this index has been + * UTF8-encoded. + * + * @return {@code true} if the retrieved data is in UTF8, {@code false} if + * not. + */ + public boolean getAssureUTF8() { + return getBoolean("assureUTF8"); + } + + /** + * Returns the total size of all bloom filter files. + * + * @return The total size of all bloom filter files, in KB. + */ + public int getBloomfilterTotalSizeKB() { + return getInteger("bloomfilterTotalSizeKB", 0); + } + + /** + * Returns the suggested size of the .tsidx file for the bucket rebuild + * process. + * Valid values are: "auto", a positive integer, or a positive + * integer followed by "KB", "MB", or "GB". + * + * @return The suggested size of the .tsidx file for the bucket rebuild + * process. + */ + public String getBucketRebuildMemoryHint() { + return getString("bucketRebuildMemoryHint"); + } + + /** + * Returns the absolute file path to the cold database for this index. 
+ * This value may contain shell expansion terms. + * + * @return The absolute file path to the cold database, or {@code null} if + * not specified. + */ + public String getColdPath() { + return getString("coldPath", null); + } + + /** + * Returns the expanded absolute file path to the cold database for this + * index. + * + * @return The expanded absolute file path to the cold database, or + * {@code null} if not specified. + */ + public String getColdPathExpanded() { + return getString("coldPath_expanded", null); + } + + /** + * Returns the frozen archive destination path for this index. + * + * @return The frozen archive destination path, or {@code null} if not + * specified. + */ + public String getColdToFrozenDir() { + return getString("coldToFrozenDir", null); + } + + /** + * Returns the path to the archiving script. + * <p>For more info about archiving scripts, see the + * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" + * target="_blank">POST data/indexes endpoint</a> in the REST API + * documentation. + * @see #getColdToFrozenDir + * + * @return The archiving script, or {@code null} if not specified. + */ + public String getColdToFrozenScript() { + return getString("coldToFrozenScript", null); + } + + /** + * Indicates whether raw data is compressed. + * + * @deprecated Splunk always compresses raw data. + * @return {@code true} if raw data is compressed, {@code false} if not. + */ + public boolean getCompressRawdata() { + return getBoolean("compressRawdata"); + } + + /** + * Returns the current size of this index. + * + * @return The current size of the index, in MB. + */ + public int getCurrentDBSizeMB() { + return getInteger("currentDBSizeMB"); + } + + /** + * Return the default index name of the Splunk instance. + * + * @return The default index name. 
+ */ + public String getDefaultDatabase() { + return getString("defaultDatabase"); + } + + /** + * Returns whether asynchronous "online fsck" bucket repair is enabled. + * <p> + * When this feature is enabled, you don't have to wait for buckets to be + * repaired before starting Splunk, but you might notice a slight + * degradation in performance as a result. + * @return {@code true} if bucket repair is enabled, {@code false} if + * not. + */ + public boolean getEnableOnlineBucketRepair() { + + return getBoolean("enableOnlineBucketRepair"); + } + + /** + * Indicates whether real-time search is enabled for this index. + * + * @return {@code true} if real-time search is enabled, {@code false} if + * not. + */ + public boolean getEnableRealtimeSearch() { + return getBoolean("enableRealtimeSearch"); + } + + /** + * Returns the maximum age for a bucket, after which the data in this index + * rolls to frozen. If archiving is necessary for frozen data, see the + * {@code coldToFrozen} attributes. + * + * @return The maximum age, in seconds, after which data rolls to frozen. + */ + public int getFrozenTimePeriodInSecs() { + return getInteger("frozenTimePeriodInSecs"); + } + + /** + * Returns the absolute path to both hot and warm buckets for this index. + * This value may contain shell expansion terms. + * + * @return This index's absolute path to both hot and warm buckets, or + * {@code null} if not specified. + */ + public String getHomePath() { + return getString("homePath", null); + } + + /** + * Returns the expanded absolute path to both hot and warm buckets for this + * index. + * + * @return The expanded absolute path to both hot and warm buckets, or + * {@code null} if not specified. + */ + public String getHomePathExpanded() { + return getString("homePath_expanded", null); + } + + /** + * Returns the index thread for this index. + * + * @return The index thread. 
+ */ + public String getIndexThreads() { + return getString("indexThreads"); + } + + /** + * Returns the last initialization time for this index. + * + * @return The last initialization time, or {@code null} if not specified. + */ + public String getLastInitTime() { + return getString("lastInitTime", null); + } + + /** + * Returns the time that indicates a bucket age. When a warm or cold bucket + * is older than this, Splunk does not create or rebuild its bloomfilter. + * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", + * or "d"). For example, "30d" for 30 days. + * @return String value + */ + public String getMaxBloomBackfillBucketAge() { + return getString("maxBloomBackfillBucketAge", null); + } + + /** + * Returns the maximum number of concurrent optimize processes that + * can run against a hot bucket for this index. + * + * @return The maximum number of concurrent optimize processes. + */ + public int getMaxConcurrentOptimizes() { + return getInteger("maxConcurrentOptimizes"); + } + + /** + * Returns the maximum data size before triggering a roll from hot to warm + * buckets for this index. + * + * @return The maximum data size, in MB, or "auto" (which means 750MB), or + * "auto_high_volume" (which means 10GB on a 64-bit system, or 1GB on a + * 32-bit system). + * @see #setMaxDataSize + */ + public String getMaxDataSize() { + return getString("maxDataSize"); + } + + /** + * Returns the maximum number of hot buckets that can exist for this index. + * + * @return The maximum number of hot buckets or "auto" (which means 3). + */ + public String getMaxHotBuckets() { + return getString("maxHotBuckets"); + } + + /** + * Returns the maximum lifetime of a hot bucket for this index. + * If a hot bucket exceeds this value, Splunk rolls it to warm. + * A value of 0 means an infinite lifetime. + * + * @return The hot bucket's maximum lifetime, in seconds. 
+ */ + public int getMaxHotIdleSecs() { + return getInteger("maxHotIdleSecs"); + } + + /** + * Returns the upper bound of the target maximum timespan of + * hot and warm buckets for this index. + * + * @return The upper bound of the target maximum timespan, in seconds. + */ + public int getMaxHotSpanSecs() { + return getInteger("maxHotSpanSecs"); + } + + /** + * Returns the amount of memory to allocate for buffering + * a single .tsidx file into memory before flushing to disk. + * + * @return The amount of memory, in MB. + */ + public int getMaxMemMB() { + return getInteger("maxMemMB"); + } + + /** + * Returns the maximum number of unique lines that are allowed + * in a bucket's .data files for this index. A value of 0 means infinite + * lines. + * + * @return The maximum number of unique lines. + */ + public int getMaxMetaEntries() { + return getInteger("maxMetaEntries"); + } + + /** + * Returns the maximum number of concurrent helper processes for this index. + * + * @return The maximum number of concurrent helper processes. + */ + public int getMaxRunningProcessGroups() { + return getInteger("maxRunningProcessGroups", 0); + } + + /** + * Returns the maximum time attribute for this index. + * + * @return The maximum time attribute, or {@code null} if not specified. + */ + public Date getMaxTime() { + return getDate("maxTime", null); + } + + /** + * Returns the maximum size of this index. If an index + * grows larger than this value, the oldest data is frozen. + * + * @return The maximum index size, in MB. + */ + public int getMaxTotalDataSizeMB() { + return getInteger("maxTotalDataSizeMB"); + } + + /** + * Returns the upper limit, in seconds, for how long an event can sit in a + * raw slice. This value applies only when replication is enabled for this + * index, and is ignored otherwise.<br> + * If there are any acknowledged events sharing this raw slice, the + * {@code MaxTimeUnreplicatedWithAcksparamater} applies instead. 
+ * @see #getMaxTimeUnreplicatedWithAcks + * @return int value + */ + public int getMaxTimeUnreplicatedNoAcks() { + return getInteger("maxTimeUnreplicatedNoAcks"); + } + + /** + * Returns the upper limit, in seconds, for how long an event can sit + * unacknowledged in a raw slice. This value only applies when indexer + * acknowledgement is enabled on forwarders and replication is enabled with + * clustering. + * @return int value + */ + public int getMaxTimeUnreplicatedWithAcks() { + return getInteger("maxTimeUnreplicatedWithAcks"); + } + + /** + * Returns the maximum number of warm buckets for this index. If this + * value is exceeded, the warm buckets with the lowest value for their + * latest times are moved to cold. + * + * @return The maximum number of warm buckets. + */ + public int getMaxWarmDBCount() { + return getInteger("maxWarmDBCount"); + } + + /** + * Returns the memory pool for this index. + * + * @return The memory pool, in MB or "auto". + */ + public String getMemPoolMB() { + return getString("memPoolMB"); + } + + /** + * Returns the frequency at which Splunkd forces a filesystem sync while + * compressing journal slices for this index. + * <p> + * A value of "disable" disables this feature completely, while a value of 0 + * forces a file-system sync after completing compression of every journal + * slice. + * + * @return The file-system sync frequency, as an integer or "disable". + */ + public String getMinRawFileSyncSecs() { + return getString("minRawFileSyncSecs"); + } + + /** + * Returns the minimum time attribute for this index. + * + * @return The minimum time attribute, or {@code null} if not specified. + */ + public Date getMinTime() { + return getDate("minTime", null); + } + + /** + * Returns the number of hot buckets that were created for this index. + * + * @return The number of hot buckets. 
+ */ + public int getNumHotBuckets() { + return getInteger("numHotBuckets", 0); + } + + /** + * Returns the number of warm buckets created for this index. + * + * @return The number of warm buckets. + */ + public int getNumWarmBuckets() { + return getInteger("numWarmBuckets", 0); + } + + /** + * Returns the number of bloom filters created for this index. + * + * @return The number of bloom filters. + */ + public int getNumBloomfilters() { + return getInteger("numBloomfilters", 0); + } + + /** + * Returns the frequency at which metadata is for partially synced (synced + * in-place) for this index. A value of 0 disables partial syncing, so + * metadata is only synced on the {@code ServiceMetaPeriod} interval. + * @see #getServiceMetaPeriod + * @see #setServiceMetaPeriod + * + * @return The metadata sync interval, in seconds. + */ + public int getPartialServiceMetaPeriod() { + return getInteger("partialServiceMetaPeriod"); + } + + /** + * Returns the future event-time quarantine for this index. Events + * that are newer than now plus this value are quarantined. + * + * @return The future event-time quarantine, in seconds. + */ + public int getQuarantineFutureSecs() { + return getInteger("quarantineFutureSecs"); + } + + /** + * Returns the past event-time quarantine for this index. Events + * that are older than now minus this value are quarantined. + * + * @return The past event-time quarantine, in seconds. + */ + public int getQuarantinePastSecs() { + return getInteger("quarantinePastSecs"); + } + + /** + * Returns the target uncompressed size of individual raw slices in the + * rawdata journal for this index. + * + * @return The target uncompressed size, in bytes. + */ + public int getRawChunkSizeBytes() { + return getInteger("rawChunkSizeBytes"); + } + + /** + * Returns the frequency to check for the need to create a new hot bucket + * and the need to roll or freeze any warm or cold buckets for this index. + * + * @return The check frequency, in seconds. 
+ */ + public int getRotatePeriodInSecs() { + return getInteger("rotatePeriodInSecs"); + } + + /** + * Returns the frequency at which metadata is synced to disk for this index. + * + * @return The meta data sync frequency, in seconds. + */ + public int getServiceMetaPeriod() { + return getInteger("serviceMetaPeriod"); + } + + /** + * Returns a list of indexes that suppress "index missing" messages. + * + * @return A comma-separated list of indexes. + */ + public String getSuppressBannerList() { + return getString("suppressBannerList", null); + } + + /** + * Returns the number of events that trigger the indexer to sync events. + * This value is global, not a per-index value. + * + * @return The number of events that trigger the indexer to sync events. + */ + public int getSync() { + return getInteger("sync"); + } + + /** + * Indicates whether the sync operation is called before the file + * descriptor is closed on metadata updates. + * + * @return {@code true} if the sync operation is called before the file + * descriptor is closed on metadata updates, {@code false} if not. + */ + public boolean getSyncMeta() { + return getBoolean("syncMeta"); + } + + /** + * Returns the absolute path to the thawed index for this index. This value + * may contain shell expansion terms. + * + * @return The absolute path to the thawed index, or {@code null} if not + * specified. + */ + public String getThawedPath() { + return getString("thawedPath", null); + } + + /** + * Returns the expanded absolute path to the thawed index for this index. + * + * @return The expanded absolute path to the thawed index, or {@code null} + * if not specified. + */ + public String getThawedPathExpanded() { + return getString("thawedPath_expanded", null); + } + + /** + * Returns the frequency at which Splunk checks for an index throttling + * condition. + * + * @return The frequency of the throttling check, in seconds. 
+ */ + public int getThrottleCheckPeriod() { + return getInteger("throttleCheckPeriod"); + } + + /** + * Returns the total event count for this index. + * + * @return The total event count. + */ + public int getTotalEventCount() { + return getInteger("totalEventCount"); + } + + /** + * Indicates whether this index is an internal index. + * + * @return {@code true} if this index is an internal index, {@code false} + * if not. + */ + public boolean isInternal() { + return getBoolean("isInternal"); + } + + /** + * Performs rolling hot buckets for this index. + */ + public void rollHotBuckets() { + ResponseMessage response = service.post(path + "/roll-hot-buckets"); + assert(response.getStatus() == 200); + } + + /** + * Sets whether the data retrieved from this index is UTF8-encoded. + * <p> + * <b>Note:</b> Indexing performance degrades when this parameter is set to + * {@code true}. + * + * In Splunk 5.0 and later, this is a global property and cannot be set on + * a per-index basis. + * + * @param assure {@code true} to ensure UTF8 encoding, {@code false} if not. + */ + public void setAssureUTF8(boolean assure) { + setCacheValue("assureUTF8", assure); + } + + /** + * Sets the number of events that make up a block for block signatures. A + * value of 100 is recommended. A value of 0 disables block signing for this + * index. + * + * @param value The event count for block signing. + */ + public void setBlockSignSize(int value) { + setCacheValue("blockSignSize", value); + } + + + /** + * Sets the suggested size of the .tsidx file for the bucket rebuild + * process. + * + * Valid values are: "auto", a positive integer, or a positive + * integer followed by "KB", "MB", or "GB". + * + * @param value The suggested size of the .tsidx file for the bucket rebuild + * process. 
+ */ + public void setBucketRebuildMemoryHint(String value) { + setCacheValue("bucketRebuildMemoryHint", value); + } + + /** + * Sets the destination path for the frozen archive, where Splunk + * automatically puts frozen buckets. The bucket freezing policy is as + * follows: + * <ul><li><b>New-style buckets (4.2 and later):</b> All files are removed + * except the raw data. To thaw frozen buckets, run {@code Splunk rebuild + * <bucket dir>} on the bucket, then move the buckets to the thawed + * directory.</li> + * <li><b>Old-style buckets (4.1 and earlier):</b> gzip all the .data and + * .tsidx files. To thaw frozen buckets, gunzip the zipped files and move + * the buckets to the thawed directory.</li></ul> + * If both {@code coldToFrozenDir} and {@code coldToFrozenScript} are + * specified, {@code coldToFrozenDir} takes precedence. + * @see #setColdToFrozenScript + * @see #getColdToFrozenScript + * + * @param destination The destination path for the frozen archive. + */ + public void setColdToFrozenDir(String destination) { + setCacheValue("coldToFrozenDir", destination); + } + + /** + * Sets the path to the archiving script. + * <p>For more info about archiving scripts, see the + * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" + * target="_blank">POST data/indexes endpoint</a> in the REST API + * documentation. + * @see #setColdToFrozenDir + * @see #getColdToFrozenDir + * + * @param script The path to the archiving script. + */ + public void setColdToFrozenScript(String script) { + setCacheValue("coldToFrozenScript", script); + } + + /** + * Sets whether asynchronous "online fsck" bucket repair is enabled. + * <p> + * When this feature is enabled, you don't have to wait for buckets to be + * repaired before starting Splunk, but you might notice a slight + * degradation in performance as a result. + * + * @param value {@code true} to enable online bucket repair, {@code false} + * if not. 
+ */ + public void setEnableOnlineBucketRepair(boolean value) { + setCacheValue("enableOnlineBucketRepair", value); + } + + /** + * Sets the maximum age for a bucket, after which the data in this index + * rolls to frozen. Freezing data removes it from the index. To archive + * data, see {@code coldToFrozenDir} and {@code coldToFrozenScript}. + * @see #setColdToFrozenDir + * @see #setColdToFrozenScript + * + * @param seconds The time, in seconds, after which indexed data rolls to + * frozen. + */ + public void setFrozenTimePeriodInSecs(int seconds) { + setCacheValue("frozenTimePeriodInSecs", seconds); + } + + /** + * Sets the time that indicates a bucket age. When a warm or cold bucket + * is older than this, Splunk does not create or rebuild its bloomfilter. + * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", + * or "d"). For example, "30d" for 30 days. + * @param time The time that indicates a bucket age. + */ + public void setMaxBloomBackfillBucketAge(String time) { + setCacheValue("maxBloomBackfillBucketAge", time); + } + + /** + * Sets the number of concurrent optimize processes that can run against + * a hot bucket for this index. + * + * @param processes The number of concurrent optimize processes. + */ + public void setMaxConcurrentOptimizes(int processes) { + setCacheValue("maxConcurrentOptimizes", processes); + } + + /** + * Sets the maximum data size before triggering a roll from hot to warm + * buckets for this index. You can also specify a value to let Splunk + * autotune this parameter: use "auto_high_volume" for high-volume indexes + * (such as the main index, or one that gets over 10GB of data per day); + * otherwise, use "auto". + * @see #getMaxDataSize + * + * @param size The size in MB, or an autotune string. + */ + public void setMaxDataSize(String size) { + setCacheValue("maxDataSize", size); + } + + /** + * Sets the maximum number of hot buckets that can exist per index. 
+ * <p> + * When {@code maxHotBuckets} is exceeded, Splunk rolls the least recently + * used (LRU) hot bucket to warm. Both normal hot buckets and quarantined + * hot buckets count towards this total. This setting operates independently + * of {@code MaxHotIdleSecs}, which can also cause hot buckets to roll. + * @see #setMaxHotIdleSecs + * @see #getMaxHotIdleSecs + * + * @param size The maximum number of hot buckets per index, or an 'auto' string. + */ + public void setMaxHotBuckets(String size) { + setCacheValue("maxHotBuckets", size); + } + + /** + * Sets the maximum lifetime of a hot bucket for this index. + * <p> + * If a hot bucket exceeds this value, Splunk rolls it to warm. + * This setting operates independently of {@code MaxHotBuckets}, which can + * also cause hot buckets to roll. + * @see #setMaxHotBuckets + * @see #getMaxHotBuckets + * + * @param seconds The hot bucket's maximum lifetime, in seconds. A value of + * 0 means an infinite lifetime. + */ + public void setMaxHotIdleSecs(int seconds) { + setCacheValue("maxHotIdleSecs", seconds); + } + + /** + * Sets the upper bound of the target maximum timespan of hot and warm + * buckets for this index. + * <p> + * <b>Note:</b> If you set this too small, you can get an explosion of + * hot and warm buckets in the file system. The system sets a lower bound + * implicitly for this parameter at 3600, but this advanced parameter should + * be set with care and understanding of the characteristics of your data. + * + * @param seconds The upper bound of the target maximum timespan, in + * seconds. + */ + public void setMaxHotSpanSecs(int seconds) { + setCacheValue("maxHotSpanSecs", seconds); + } + + /** + * Sets the amount of memory allocated for buffering a single .tsidx + * file before flushing to disk. + * + * @param memory The amount of memory, in MB. 
+ */ + public void setMaxMemMB(int memory) { + setCacheValue("maxMemMB", memory); + } + + /** + * Sets the maximum number of unique lines in .data files in a bucket, which + * may help to reduce memory consumption. + * <p> + * If this value is exceeded, a hot bucket is rolled to prevent a further + * increase. If your buckets are rolling due to Strings.data hitting this + * limit, the culprit might be the "punct" field in your data. If you don't + * use that field, it might be better to just disable this (see the + * props.conf.spec in $SPLUNK_HOME/etc/system/README). + * + * @param entries The maximum number of unique lines. A value of 0 means + * infinite lines. + */ + public void setMaxMetaEntries(int entries) { + setCacheValue("maxMetaEntries", entries); + } + + + /** + * Sets the upper limit for how long an event can sit in a + * raw slice. This value applies only when replication is enabled for this + * index, and is ignored otherwise.<br> + * If there are any acknowledged events sharing this raw slice, the + * {@code MaxTimeUnreplicatedWithAcksparamater} applies instead. + * + * @param value The upper limit, in seconds. A value of 0 disables this + * setting. + */ + public void setMaxTimeUnreplicatedNoAcks(int value) { + setCacheValue("maxTimeUnreplicatedNoAcks", value); + } + + /** + * Sets the upper limit for how long an event can sit unacknowledged in a + * raw slice. This value only applies when indexer acknowledgement is + * enabled on forwarders and replication is enabled with clustering. + * <p> + * This number should not exceed the acknowledgement timeout configured on + * any forwarder. + * + * @param value The upper limit, in seconds. A value of 0 disables this + * setting (not recommended). + */ + public void setMaxTimeUnreplicatedWithAcks(int value) { + setCacheValue("maxTimeUnreplicatedWithAcks", value); + } + + /** + * Sets the maximum size for this index. If an index grows larger than this + * value, the oldest data is frozen. 
+ * + * @param size The maximum index size, in MB. + */ + public void setMaxTotalDataSizeMB(int size) { + setCacheValue("maxTotalDataSizeMB", size); + } + + /** + * Sets the maximum number of warm buckets. If this number is exceeded, + * the warm buckets with the lowest value for their latest times will be + * moved to cold. + * + * @param buckets The maximum number of warm buckets. + */ + public void setMaxWarmDBCount(int buckets) { + setCacheValue("maxWarmDBCount", buckets); + } + + /** + * Sets the frequency at which Splunkd forces a file system sync while + * compressing journal slices for this index. A value of "disable" disables + * this feature completely, while a value of 0 forces a file-system sync + * after completing compression of every journal slice. + * + * @param frequency The file-system sync frequency, as an integer or + * "disable". + */ + public void setMinRawFileSyncSecs(String frequency) { + setCacheValue("minRawFileSyncSecs", frequency); + } + + /** + * Sets the frequency at which metadata is for partially synced (synced + * in-place) for this index. A value of 0 disables partial syncing, so + * metadata is only synced on the {@code ServiceMetaPeriod} interval. + * @see #setServiceMetaPeriod + * @see #getServiceMetaPeriod + * + * @param frequency The metadata sync interval, in seconds. + */ + public void setPartialServiceMetaPeriod(int frequency) { + setCacheValue("partialServiceMetaPeriod", frequency); + } + + /** + * Sets a quarantine for events that are timestamped in the future to help + * prevent main hot buckets from being polluted with fringe events. Events + * that are newer than "now" plus this value are quarantined. + * + * @param window The future event-time quarantine, in seconds. 
+ */ + public void setQuarantineFutureSecs(int window) { + setCacheValue("quarantineFutureSecs", window); + } + + /** + * Sets a quarantine for events that are timestamped in the past to help + * prevent main hot buckets from being polluted with fringe events. Events + * that are older than "now" plus this value are quarantined. + * + * @param window The past event-time quarantine, in seconds. + */ + public void setQuarantinePastSecs(int window) { + setCacheValue("quarantinePastSecs", window); + } + + /** + * Sets the target uncompressed size of individual raw slices in the rawdata + * journal for this index. + * <p> + * This parameter only specifies a target chunk size. The actual chunk size + * might be slightly larger by an amount proportional to an individual event + * size. + * <blockquote> + * <b>WARNING:</b> This is an advanced parameter. Only change it if you are + * instructed to do so by Splunk Support. + * </blockquote> + * @param size The target uncompressed size, in bytes. (0 is not a valid + * value--if 0 is used, this parameter is set to the default value.) + */ + public void setRawChunkSizeBytes(int size) { + setCacheValue("rawChunkSizeBytes", size); + } + + /** + * Sets the frequency to check for the need to create a new hot bucket and + * the need to roll or freeze any warm or cold buckets for this index. + * + * @param frequency The check frequency, in seconds. + */ + public void setRotatePeriodInSecs(int frequency) { + setCacheValue("rotatePeriodInSecs", frequency); + } + + /** + * Sets the frequency at which metadata is synced to disk for this index. + * + * @param frequency The meta data sync frequency, in seconds. + */ + public void setServiceMetaPeriod(int frequency) { + setCacheValue("serviceMetaPeriod", frequency); + } + + /** + * Sets whether the sync operation is called before the file descriptor is + * closed on metadata updates. 
+ * <p> + * This functionality improves the integrity of metadata files, especially + * with regard to operating system crashes and machine failures. + * <blockquote> + * <b>WARNING:</b> This is an advanced parameter. Only change it if you are + * instructed to do so by Splunk Support. + * </blockquote> + * @param sync {@code true} to call the sync operation before the file + * descriptor is closed on metadata updates, {@code false} if not. + */ + public void setSyncMeta(boolean sync) { + setCacheValue("syncMeta", sync); + } + + /** + * Sets the frequency at which Splunk checks for an index throttling + * condition. + * + * @param frequency The frequency of the throttling check, in seconds. + */ + public void setThrottleCheckPeriod(int frequency) { + setCacheValue("throttleCheckPeriod", frequency); + } + + /** + * Submits an event to this index through an HTTP POST request. + * + * @param data The event data to post. + */ + public void submit(String data) { + Receiver receiver = service.getReceiver(); + receiver.submit(getName(), data); + } + + /** + * Submits an event to this index through an HTTP POST request. + * + * @param args Optional arguments for this request. Valid parameters are: + * "host", "host_regex", "source", and "sourcetype". + * @param data The event data to post. + */ + public void submit(Args args, String data) { + Receiver receiver = service.getReceiver(); + receiver.submit(getName(), args, data); + } + + /** + * Uploads a file to this index as an event stream. + * <p> + * <b>Note:</b> This file must be directly accessible by the Splunk server. + * + * @param filename The path and filename. + */ + public void upload(String filename) { + EntityCollection<Upload> uploads = service.getUploads(); + Args args = new Args("index", getName()); + uploads.create(filename, args); + } + + /** + * Uploads a file to this index as an event stream. + * <p> + * <b>Note:</b> This file must be directly accessible by the Splunk server. 
+ * + * @param filename The path and filename. + * + * @param args Optional arguments for this request. Valid parameters are: + * "host", "sourcetype", "rename-source". More found at: + * http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTinput#data.2Finputs.2Foneshot + * + */ + public void upload(String filename, Args args) { + EntityCollection<Upload> uploads = service.getUploads(); + if(args.containsKey("index")){ + throw new IllegalArgumentException("The 'index' parameter cannot be passed to an index's oneshot upload."); + } + args.add("index", getName()); + uploads.create(filename, args); + } +} + diff --git a/splunk/src/main/java/com/splunk/InputCollection.java b/splunk/src/main/java/com/splunk/InputCollection.java index 0fcefa02..d8baf3a9 100644 --- a/splunk/src/main/java/com/splunk/InputCollection.java +++ b/splunk/src/main/java/com/splunk/InputCollection.java @@ -1,407 +1,407 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.*; - -/** - * The {@code InputCollection} class represents a collection of inputs. The - * collection is heterogeneous and each member contains an {@code InputKind} - * value that indicates the specific type of input. - */ -public class InputCollection extends EntityCollection<Input> { - protected Set<InputKind> inputKinds = new HashSet<InputKind>(); - - /** - * Class constructor. 
- * - * @param service The connected {@code Service} instance. - */ - InputCollection(Service service) { - super(service, "data/inputs"); - } - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - */ - InputCollection(Service service, Args args) { - super(service, "data/inputs", args); - } - - /** {@inheritDoc} */ - @Override public boolean containsKey(Object key) { - Input input = retrieveInput((String)key); - return (input != null); - } - - /** - * Creates a stub for a new data input. - * - * @param name Depending on the type of input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @return No return value. - * @throws UnsupportedOperationException The UnsupportedOperationException instance - */ - @Override public Input create(String name) { - throw new UnsupportedOperationException(); - } - - /** - * Creates a stub for a new data input based on additional arguments. - * - * @param name Depending on the type of data input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @param args Optional arguments to define the data input. 
For a list of - * the available parameters, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" - * target="_blank">Input parameters</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * @return No return value. - * @throws UnsupportedOperationException The UnsupportedOperationException instance - */ - @Override public Input create(String name, Map args) { - throw new UnsupportedOperationException(); - } - - /** - * Creates a new data input based on the input kind. - * - * @param name Depending on the type of data input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @param kind A member of {@code InputKind}, indicating the type of input. - * @param <T> The implicit type of the input. - * @return The {@code Input} that was created. - */ - public <T extends Input> T create(String name, InputKind kind) { - return (T)create(name, kind, (Map<String, Object>)null); - } - - /** - * Creates a new data input based on the input kind and additional - * arguments. - * - * @param name Depending on the type of data input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @param kind A member of {@code InputKind}, indicating the type of input. 
- * @param args Optional arguments to define the data input. For a list of - * the available parameters, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" - * target="_blank">Input parameters</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * @param <T> The implicit type of the input. - * @return The {@code Input} that was created. - */ - public <T extends Input> T - create(String name, InputKind kind, Map<String, Object> args) { - args = Args.create(args).add("name", name); - String path = this.path + "/" + kind.getRelativePath(); - service.post(path, args); - - invalidate(); - - return (T)get(name); - } - - /** - * Creates a new data input based on an Atom entry. - * - * @param entry The {@code AtomEntry} object describing the entry. - * @return The {@code Input} that was created. - */ - @Override - protected Input createItem(AtomEntry entry) { - String path = itemPath(entry); - InputKind kind = itemKind(path); - Class inputClass = kind.getInputClass(); - return createItem(inputClass, path, null); - } - - /** - * {@inheritDoc} - */ - @Override public Input get(Object key) { - return retrieveInput((String)key); - } - - /** - * Returns the value of a scoped, namespace-constrained key, if it - * exists within this collection. - * - * @param key The key to look up. - * @param namespace The namespace to constrain the search to. - * @return The value indexed by the key, or {@code null} if it doesn't - * exist. - */ - public Input get(Object key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - return retrieveInput((String)key, namespace); - } - - /** - * Returns the input kind for a given path. - * - * @param path The relative endpoint path (the path that follows - * data/inputs). - * @return A member of {@code InputKind}, indicating the type of input. 
- */ - protected InputKind itemKind(String path) { - String relpathWithInputName = Util.substringAfter(path, "/data/inputs/", null); - for (InputKind kind : inputKinds) { - if (relpathWithInputName.startsWith(kind.getRelativePath())) { - return kind; - } - } - - // Not good. This means that there is an input of an unknown kind. - return InputKind.Unknown; - } - - /** - * Return a set of all the input kinds recognized by the Splunk server. - * - * @return A set of {@code InputKind}s. - */ - public Set<InputKind> getInputKinds() { - return this.inputKinds; - } - - /** - * Indicates whether a given string matches the input name (string - * equality). For scripted inputs, which are listed by their full path, this - * method compares only the final component of the filename for a match. - * - * @param kind A member of {@code InputKind}, indicating the type of input. - * @param searchFor A string to search for. - * @param searchIn The string that contains the input name. - * @return {@code true} if the string matches the input name, {@code false} - * if not. - */ - protected static boolean matchesInputName(InputKind kind, String searchFor, String searchIn) { - if (kind == InputKind.Script) { - return searchIn.endsWith("/" + searchFor) || searchIn.endsWith("\\" + searchFor); - } else { - return searchFor.equals(searchIn); - } - } - - - /** - * Assembles a set of all the input kinds that are available on this Splunk - * instance. To list all inputs, pass an empty list to {@code subPath}. Or, - * specify a component of the path such as "tcp" to list all TCP inputs. - * - * @param subPath A list of strings containing the components of the - * endpoint path that follow data/inputs/. - * @return A set of available {@code InputKind}s. 
- */ - private Set<InputKind> assembleInputKindSet(List<String> subPath) { - Set<InputKind> kinds = new HashSet<InputKind>(); - ResponseMessage response = service.get(this.path + "/" + Util.join("/", subPath)); - AtomFeed feed = AtomFeed.parseStream(response.getContent()); - for (AtomEntry entry : feed.entries) { - String itemKeyName = itemKey(entry); - - boolean hasCreateLink = false; - for (String linkName : entry.links.keySet()) { - if (linkName.equals("create")) { - hasCreateLink = true; - } - } - - List<String> thisSubPath = new ArrayList<String>(subPath); - thisSubPath.add(itemKeyName); - - String relpath = Util.join("/", thisSubPath); - - if (relpath.equals("all") || relpath.equals("tcp/ssl")) { - // Skip these input types - continue; - } else if (hasCreateLink) { - // Found an InputKind leaf - InputKind newKind = InputKind.create(relpath); - kinds.add(newKind); - } else { - Set<InputKind> subKinds = assembleInputKindSet(thisSubPath); - kinds.addAll(subKinds); - } - } - return kinds; - } - - /** - * Refreshes the {@code inputKinds} field on this object. - */ - private void refreshInputKinds() { - Set<InputKind> kinds = assembleInputKindSet(new ArrayList<String>()); - - this.inputKinds.clear(); - this.inputKinds.addAll(kinds); - } - - /** - * Refreshes this input collection. - * - * @return The refreshed {@code InputCollection}. - */ - @Override public InputCollection refresh() { - // Populate this.inputKinds - refreshInputKinds(); - - items.clear(); - - // Iterate over all input kinds and collect all instances. - for (InputKind kind : this.inputKinds) { - if (service.versionIsAtLeast("6.0.0")) { - // In Splunk 6 and later, the registry endpoint has been deprecated in favor of the new - // WinRegMon modular input, but both now point to the same place. To avoid duplicates, we have - // to read only one of them. 
- if (kind.getKind().equals("registry")) { - continue; - } - } - String relpath = kind.getRelativePath(); - String inputs = String.format("%s/%s?count=-1", path, relpath); - ResponseMessage response; - try { - response = service.get(inputs); - } - catch (HttpException e) { - // On some platforms certain input endpoints don't exist, for - // example the Windows inputs endpoints don't exist on non- - // Windows platforms. - if (e.getStatus() == 404) continue; - throw e; - } - AtomFeed feed; - try { - feed = AtomFeed.parseStream(response.getContent()); - } catch (Exception e) { - throw new RuntimeException(e); - } - load(feed); - } - - return this; - } - - /** - * {@inheritDoc} - */ - @Override public Input remove(String key) { - Input input = retrieveInput(key); - if (input != null) { - input.remove(); - } - return input; - } - - /** - * {@inheritDoc} - */ - @Override public Input remove( - String key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - - Input input = retrieveInput(key, namespace); - if (input != null) { - input.remove(); - } - return input; - } - - private Input retrieveInput(String key) { - validate(); - - // Because scripted input names are not 1:1 with the original name - // (they are the absolute path on the Splunk instance followed by - // the original name), we will iterate over the entities in the list, - // and if we find one that matches, return it. 
- Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); - for (Entry<String, LinkedList<Input>> entry: set) { - String entryKey = entry.getKey(); - LinkedList<Input> entryValue = entry.getValue(); - InputKind kind = entryValue.get(0).getKind(); - - if (InputCollection.matchesInputName(kind, key, entryKey)) { - if (entryValue.size() > 1) { - throw new SplunkException(SplunkException.AMBIGUOUS, - "Multiple inputs matched " + key + "; specify a namespace to disambiguate."); - } else { - return entryValue.get(0); - } - } - } - return null; - } - - private Input retrieveInput(String key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - validate(); - - // Because scripted input names are not 1:1 with the original name - // (they are the absolute path on the Splunk instance followed by - // the original name), we will iterate over the entities in the list, - // and if we find one that matches, return it. - String pathMatcher = service.fullpath("", namespace); - Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); - for (Entry<String, LinkedList<Input>> entry: set) { - String entryKey = entry.getKey(); - LinkedList<Input> entryValue = entry.getValue(); - InputKind kind = entryValue.get(0).getKind(); - - if (InputCollection.matchesInputName(kind, key, entryKey)) { - for (Input entity: entryValue) { - if (entity.path.startsWith(pathMatcher)) { - return entity; - } - } - } - } - return null; - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.*; + +/** + * The {@code InputCollection} class represents a collection of inputs. The + * collection is heterogeneous and each member contains an {@code InputKind} + * value that indicates the specific type of input. + */ +public class InputCollection extends EntityCollection<Input> { + protected Set<InputKind> inputKinds = new HashSet<>(); + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + */ + InputCollection(Service service) { + super(service, "data/inputs"); + } + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + */ + InputCollection(Service service, Args args) { + super(service, "data/inputs", args); + } + + /** {@inheritDoc} */ + @Override public boolean containsKey(Object key) { + Input input = retrieveInput((String)key); + return (input != null); + } + + /** + * Creates a stub for a new data input. + * + * @param name Depending on the type of input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @return No return value. + * @throws UnsupportedOperationException The UnsupportedOperationException instance + */ + @Override public Input create(String name) { + throw new UnsupportedOperationException(); + } + + /** + * Creates a stub for a new data input based on additional arguments. 
+ * + * @param name Depending on the type of data input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @param args Optional arguments to define the data input. For a list of + * the available parameters, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" + * target="_blank">Input parameters</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * @return No return value. + * @throws UnsupportedOperationException The UnsupportedOperationException instance + */ + @Override public Input create(String name, Map args) { + throw new UnsupportedOperationException(); + } + + /** + * Creates a new data input based on the input kind. + * + * @param name Depending on the type of data input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @param kind A member of {@code InputKind}, indicating the type of input. + * @param <T> The implicit type of the input. + * @return The {@code Input} that was created. + */ + public <T extends Input> T create(String name, InputKind kind) { + return (T)create(name, kind, (Map<String, Object>)null); + } + + /** + * Creates a new data input based on the input kind and additional + * arguments. 
+ * + * @param name Depending on the type of data input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @param kind A member of {@code InputKind}, indicating the type of input. + * @param args Optional arguments to define the data input. For a list of + * the available parameters, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" + * target="_blank">Input parameters</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * @param <T> The implicit type of the input. + * @return The {@code Input} that was created. + */ + public <T extends Input> T + create(String name, InputKind kind, Map<String, Object> args) { + args = Args.create(args).add("name", name); + String path = this.path + "/" + kind.getRelativePath(); + service.post(path, args); + + invalidate(); + + return (T)get(name); + } + + /** + * Creates a new data input based on an Atom entry. + * + * @param entry The {@code AtomEntry} object describing the entry. + * @return The {@code Input} that was created. + */ + @Override + protected Input createItem(AtomEntry entry) { + String path = itemPath(entry); + InputKind kind = itemKind(path); + Class inputClass = kind.getInputClass(); + return createItem(inputClass, path, null); + } + + /** + * {@inheritDoc} + */ + @Override public Input get(Object key) { + return retrieveInput((String)key); + } + + /** + * Returns the value of a scoped, namespace-constrained key, if it + * exists within this collection. + * + * @param key The key to look up. + * @param namespace The namespace to constrain the search to. 
+ * @return The value indexed by the key, or {@code null} if it doesn't + * exist. + */ + public Input get(Object key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + return retrieveInput((String)key, namespace); + } + + /** + * Returns the input kind for a given path. + * + * @param path The relative endpoint path (the path that follows + * data/inputs). + * @return A member of {@code InputKind}, indicating the type of input. + */ + protected InputKind itemKind(String path) { + String relpathWithInputName = Util.substringAfter(path, "/data/inputs/", null); + for (InputKind kind : inputKinds) { + if (relpathWithInputName.startsWith(kind.getRelativePath())) { + return kind; + } + } + + // Not good. This means that there is an input of an unknown kind. + return InputKind.Unknown; + } + + /** + * Return a set of all the input kinds recognized by the Splunk server. + * + * @return A set of {@code InputKind}s. + */ + public Set<InputKind> getInputKinds() { + return this.inputKinds; + } + + /** + * Indicates whether a given string matches the input name (string + * equality). For scripted inputs, which are listed by their full path, this + * method compares only the final component of the filename for a match. + * + * @param kind A member of {@code InputKind}, indicating the type of input. + * @param searchFor A string to search for. + * @param searchIn The string that contains the input name. + * @return {@code true} if the string matches the input name, {@code false} + * if not. + */ + protected static boolean matchesInputName(InputKind kind, String searchFor, String searchIn) { + if (kind == InputKind.Script) { + return searchIn.endsWith("/" + searchFor) || searchIn.endsWith("\\" + searchFor); + } else { + return searchFor.equals(searchIn); + } + } + + + /** + * Assembles a set of all the input kinds that are available on this Splunk + * instance. To list all inputs, pass an empty list to {@code subPath}. 
Or, + * specify a component of the path such as "tcp" to list all TCP inputs. + * + * @param subPath A list of strings containing the components of the + * endpoint path that follow data/inputs/. + * @return A set of available {@code InputKind}s. + */ + private Set<InputKind> assembleInputKindSet(List<String> subPath) { + Set<InputKind> kinds = new HashSet<>(); + ResponseMessage response = service.get(this.path + "/" + Util.join("/", subPath)); + AtomFeed feed = AtomFeed.parseStream(response.getContent()); + for (AtomEntry entry : feed.entries) { + String itemKeyName = itemKey(entry); + + boolean hasCreateLink = false; + for (String linkName : entry.links.keySet()) { + if (linkName.equals("create")) { + hasCreateLink = true; + } + } + + List<String> thisSubPath = new ArrayList<>(subPath); + thisSubPath.add(itemKeyName); + + String relpath = Util.join("/", thisSubPath); + + if (relpath.equals("all") || relpath.equals("tcp/ssl")) { + // Skip these input types + continue; + } else if (hasCreateLink) { + // Found an InputKind leaf + InputKind newKind = InputKind.create(relpath); + kinds.add(newKind); + } else { + Set<InputKind> subKinds = assembleInputKindSet(thisSubPath); + kinds.addAll(subKinds); + } + } + return kinds; + } + + /** + * Refreshes the {@code inputKinds} field on this object. + */ + private void refreshInputKinds() { + Set<InputKind> kinds = assembleInputKindSet(new ArrayList<>()); + + this.inputKinds.clear(); + this.inputKinds.addAll(kinds); + } + + /** + * Refreshes this input collection. + * + * @return The refreshed {@code InputCollection}. + */ + @Override public InputCollection refresh() { + // Populate this.inputKinds + refreshInputKinds(); + + items.clear(); + + // Iterate over all input kinds and collect all instances. 
+ for (InputKind kind : this.inputKinds) { + if (service.versionIsAtLeast("6.0.0")) { + // In Splunk 6 and later, the registry endpoint has been deprecated in favor of the new + // WinRegMon modular input, but both now point to the same place. To avoid duplicates, we have + // to read only one of them. + if (kind.getKind().equals("registry")) { + continue; + } + } + String relpath = kind.getRelativePath(); + String inputs = String.format("%s/%s?count=-1", path, relpath); + ResponseMessage response; + try { + response = service.get(inputs); + } + catch (HttpException e) { + // On some platforms certain input endpoints don't exist, for + // example the Windows inputs endpoints don't exist on non- + // Windows platforms. + if (e.getStatus() == 404) continue; + throw e; + } + AtomFeed feed; + try { + feed = AtomFeed.parseStream(response.getContent()); + } catch (Exception e) { + throw new RuntimeException(e); + } + load(feed); + } + + return this; + } + + /** + * {@inheritDoc} + */ + @Override public Input remove(String key) { + Input input = retrieveInput(key); + if (input != null) { + input.remove(); + } + return input; + } + + /** + * {@inheritDoc} + */ + @Override public Input remove( + String key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + + Input input = retrieveInput(key, namespace); + if (input != null) { + input.remove(); + } + return input; + } + + private Input retrieveInput(String key) { + validate(); + + // Because scripted input names are not 1:1 with the original name + // (they are the absolute path on the Splunk instance followed by + // the original name), we will iterate over the entities in the list, + // and if we find one that matches, return it. 
+ Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); + for (Entry<String, LinkedList<Input>> entry: set) { + String entryKey = entry.getKey(); + LinkedList<Input> entryValue = entry.getValue(); + InputKind kind = entryValue.get(0).getKind(); + + if (InputCollection.matchesInputName(kind, key, entryKey)) { + if (entryValue.size() > 1) { + throw new SplunkException(SplunkException.AMBIGUOUS, + "Multiple inputs matched " + key + "; specify a namespace to disambiguate."); + } else { + return entryValue.get(0); + } + } + } + return null; + } + + private Input retrieveInput(String key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + validate(); + + // Because scripted input names are not 1:1 with the original name + // (they are the absolute path on the Splunk instance followed by + // the original name), we will iterate over the entities in the list, + // and if we find one that matches, return it. + String pathMatcher = service.fullpath("", namespace); + Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); + for (Entry<String, LinkedList<Input>> entry: set) { + String entryKey = entry.getKey(); + LinkedList<Input> entryValue = entry.getValue(); + InputKind kind = entryValue.get(0).getKind(); + + if (InputCollection.matchesInputName(kind, key, entryKey)) { + for (Input entity: entryValue) { + if (entity.path.startsWith(pathMatcher)) { + return entity; + } + } + } + } + return null; + } +} + diff --git a/splunk/src/main/java/com/splunk/LicensePool.java b/splunk/src/main/java/com/splunk/LicensePool.java index 613fcaac..527d0554 100644 --- a/splunk/src/main/java/com/splunk/LicensePool.java +++ b/splunk/src/main/java/com/splunk/LicensePool.java @@ -1,173 +1,173 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** -* The {@code LicensePool} class represents a license pool, which is made up -* of a single license master and zero or more license slave instances of Splunk -* that are configured to use the licensing volume from a set license or license - * stack. - */ -public class LicensePool extends Entity { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The license pool endpoint. - */ - LicensePool(Service service, String path) { - super(service, path); - } - - /** - * Returns the description of this license pool. - * - * @return The description, or {@code null} if not specified. - */ - public String getDescription() { - return getString("description", null); - } - - /** - * Returns the indexing quota for this license pool. - * - * @return A string containing the indexing quota in bytes, or "MAX" to - * indicate the maximum amount that is allowed. - */ - public String getQuota() { - return getString("quota", "0"); - } - - /** - * Returns the list of slaves for this license pool. - * - * @return A comma-separated list of slaves by ID, or {@code null} if not - * specified. - */ - public String[] getSlaves() { - if (toUpdate.containsKey("slaves")) { - String value = (String)toUpdate.get("slaves"); - return value.split(","); - } - else { - return getStringArray("slaves", null); - } - } - - /** - * Returns the usage of indexing volume by slave licenses in this license - * pool. 
- * - * @return A map from each slave GUID to the number of bytes it is using. - */ - public Map<String, Long> getSlavesUsageBytes() { - @SuppressWarnings("unchecked") - HashMap<String, Object> values = (HashMap<String, Object>)get("slaves_usage_bytes"); - if (values == null) { - values = new HashMap<String, Object>(); - } - - HashMap<String, Long> usageBytes = new HashMap<String, Long>(); - - for(String key : values.keySet()) { - String value = (String)values.get(key); - usageBytes.put(key, Long.parseLong(value)); - } - - return usageBytes; - } - - /** - * Returns the stack ID for this license pool. Valid values are: - * <ul> - * <li>"download-trial"</li> - * <li>"enterprise"</li> - * <li>"forwarder"</li> - * <li>"free"</li></ul> - * - * @return The license pool stack ID, or {@code null} if not specified. - */ - public String getStackId() { - return getString("stack_id", null); - } - - /** - * Returns the usage of indexing volume for this license pool. - * - * @return This license pool's usage, in bytes. - */ - public long getUsedBytes() { - return getLong("used_bytes", 0); - } - - /** - * Sets whether to append or overwrite slaves to this license pool. - * - * @param appendSlaves {@code true} to append slaves, {@code false} to - * overwrite slaves. - */ - public void setAppendSlaves(boolean appendSlaves) { - setCacheValue("append_slaves", appendSlaves); - } - - /** - * Sets the description of this license pool. - * - * @param description The description. - */ - public void setDescription(String description) { - setCacheValue("description", description); - } - - /** - * Sets the byte quota of this license pool. 
- * - * @param quota The indexing quota of this license pool, specified as: - * <ul><li><i>number</i></li> - * <li><i>number</i> followed by "MB" or "GB" (for example, "10GB")</li> - * <li>"MAX" (Only one license pool can have "MAX" size in a stack.)</li> - * </ul> - */ - public void setQuota(String quota) { - setCacheValue("quota", quota); - } - - /** - * Sets the list of slaves that are members of this license pool. - * - * @param slaves The comma-separated list of slaves. Use an asterisk ("*") - * to accept all slaves. - */ - public void setSlaves(String slaves) { - setCacheValue("slaves", slaves); - } - - /** - * Sets the list of slaves that are members of this license pool. - * - * @param slaves The array of slaves. To accept all slaves, use an - * array with a single asterisk element ("*"). - */ - public void setSlaves(String[] slaves) { - setSlaves(Util.join(",", slaves)); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** +* The {@code LicensePool} class represents a license pool, which is made up +* of a single license master and zero or more license slave instances of Splunk +* that are configured to use the licensing volume from a set license or license + * stack. + */ +public class LicensePool extends Entity { + + /** + * Class constructor. 
+ * + * @param service The connected {@code Service} instance. + * @param path The license pool endpoint. + */ + LicensePool(Service service, String path) { + super(service, path); + } + + /** + * Returns the description of this license pool. + * + * @return The description, or {@code null} if not specified. + */ + public String getDescription() { + return getString("description", null); + } + + /** + * Returns the indexing quota for this license pool. + * + * @return A string containing the indexing quota in bytes, or "MAX" to + * indicate the maximum amount that is allowed. + */ + public String getQuota() { + return getString("quota", "0"); + } + + /** + * Returns the list of slaves for this license pool. + * + * @return A comma-separated list of slaves by ID, or {@code null} if not + * specified. + */ + public String[] getSlaves() { + if (toUpdate.containsKey("slaves")) { + String value = (String)toUpdate.get("slaves"); + return value.split(","); + } + else { + return getStringArray("slaves", null); + } + } + + /** + * Returns the usage of indexing volume by slave licenses in this license + * pool. + * + * @return A map from each slave GUID to the number of bytes it is using. + */ + public Map<String, Long> getSlavesUsageBytes() { + @SuppressWarnings("unchecked") + HashMap<String, Object> values = (HashMap<String, Object>)get("slaves_usage_bytes"); + if (values == null) { + values = new HashMap<>(); + } + + HashMap<String, Long> usageBytes = new HashMap<>(); + + for(String key : values.keySet()) { + String value = (String)values.get(key); + usageBytes.put(key, Long.parseLong(value)); + } + + return usageBytes; + } + + /** + * Returns the stack ID for this license pool. Valid values are: + * <ul> + * <li>"download-trial"</li> + * <li>"enterprise"</li> + * <li>"forwarder"</li> + * <li>"free"</li></ul> + * + * @return The license pool stack ID, or {@code null} if not specified. 
+ */ + public String getStackId() { + return getString("stack_id", null); + } + + /** + * Returns the usage of indexing volume for this license pool. + * + * @return This license pool's usage, in bytes. + */ + public long getUsedBytes() { + return getLong("used_bytes", 0); + } + + /** + * Sets whether to append or overwrite slaves to this license pool. + * + * @param appendSlaves {@code true} to append slaves, {@code false} to + * overwrite slaves. + */ + public void setAppendSlaves(boolean appendSlaves) { + setCacheValue("append_slaves", appendSlaves); + } + + /** + * Sets the description of this license pool. + * + * @param description The description. + */ + public void setDescription(String description) { + setCacheValue("description", description); + } + + /** + * Sets the byte quota of this license pool. + * + * @param quota The indexing quota of this license pool, specified as: + * <ul><li><i>number</i></li> + * <li><i>number</i> followed by "MB" or "GB" (for example, "10GB")</li> + * <li>"MAX" (Only one license pool can have "MAX" size in a stack.)</li> + * </ul> + */ + public void setQuota(String quota) { + setCacheValue("quota", quota); + } + + /** + * Sets the list of slaves that are members of this license pool. + * + * @param slaves The comma-separated list of slaves. Use an asterisk ("*") + * to accept all slaves. + */ + public void setSlaves(String slaves) { + setCacheValue("slaves", slaves); + } + + /** + * Sets the list of slaves that are members of this license pool. + * + * @param slaves The array of slaves. To accept all slaves, use an + * array with a single asterisk element ("*"). 
+ */ + public void setSlaves(String[] slaves) { + setSlaves(Util.join(",", slaves)); + } +} diff --git a/splunk/src/main/java/com/splunk/ModularInputKind.java b/splunk/src/main/java/com/splunk/ModularInputKind.java index 5cc6f2a5..73253990 100644 --- a/splunk/src/main/java/com/splunk/ModularInputKind.java +++ b/splunk/src/main/java/com/splunk/ModularInputKind.java @@ -1,111 +1,111 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import java.util.HashMap; -import java.util.Map; -import java.util.List; - -/** - * The {@code ModularInputKind} class represents a particular modular input. - * The actual inputs of this kind can be accessed from the - * {@code InputCollection} object. - */ -public class ModularInputKind extends Entity { - protected Map<String, Map<String,String>> args; - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The entity's endpoint. - */ - ModularInputKind(Service service, String path) { - super(service, path); - Map<String, Map<String, Map<String,String>>> endpoint = - (Map<String, Map<String, Map<String,String>>>)get("endpoint"); - this.args = endpoint.get("args"); - } - - /** - * Returns an argument map that contains the argument names as keys, and the - * {@code ModularInputKindArgument}s as corresponding values. - * - * @return A {@code Map} containing the argument key-value pairs. 
- */ - public Map<String, ModularInputKindArgument> getArguments() { - Map<String, ModularInputKindArgument> arguments = new HashMap<String, ModularInputKindArgument>(); - for (String argumentName : args.keySet()) { - arguments.put(argumentName, getArgument(argumentName)); - } - return arguments; - } - - /** - * Returns the streaming mode of this modular input kind. - * - * @return The streaming mode ("xml" or "simple"). - */ - public String getStreamingMode() { - String mode = getString("streaming_mode"); - return mode; - } - - /** - * Returns a map-like object representing a particular argument of this - * modular input kind. - * - * @param argumentName The name of the argument to retrieve. - * @return A {@code ModularInputKindArgument} object representing the given - * argument, or {@code null} if the argument does not exist. - */ - public ModularInputKindArgument getArgument(String argumentName) { - if (this.args.get(argumentName) != null) { - return new ModularInputKindArgument(this.args.get(argumentName)); - } else { - return null; - } - } - - /** - * Returns the description of this modular input kind. - * - * @return A string containing the description. - */ - public String getDescription() { - return getString("description", null); - } - - /** - * Returns the title of this modular input kind, which is also displayed in - * Splunk Web (rather than the name used in the REST API). - * - * @return A string containing the title. - */ - public String getTitle() { - return getString("title", null); - } - - /** - * Indicates whether this modular input kind has a given argument. - * - * @param argumentName The argument to look up. - * @return {@code true} if the argument exists, {@code false} if not. - */ - public boolean hasArgument(String argumentName) { - return this.args.containsKey(argumentName); - } -} +/* + * Copyright 2012 Splunk, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.splunk; + +import java.util.HashMap; +import java.util.Map; +import java.util.List; + +/** + * The {@code ModularInputKind} class represents a particular modular input. + * The actual inputs of this kind can be accessed from the + * {@code InputCollection} object. + */ +public class ModularInputKind extends Entity { + protected Map<String, Map<String,String>> args; + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The entity's endpoint. + */ + ModularInputKind(Service service, String path) { + super(service, path); + Map<String, Map<String, Map<String,String>>> endpoint = + (Map<String, Map<String, Map<String,String>>>)get("endpoint"); + this.args = endpoint.get("args"); + } + + /** + * Returns an argument map that contains the argument names as keys, and the + * {@code ModularInputKindArgument}s as corresponding values. + * + * @return A {@code Map} containing the argument key-value pairs. + */ + public Map<String, ModularInputKindArgument> getArguments() { + Map<String, ModularInputKindArgument> arguments = new HashMap<>(); + for (String argumentName : args.keySet()) { + arguments.put(argumentName, getArgument(argumentName)); + } + return arguments; + } + + /** + * Returns the streaming mode of this modular input kind. + * + * @return The streaming mode ("xml" or "simple"). 
+ */ + public String getStreamingMode() { + String mode = getString("streaming_mode"); + return mode; + } + + /** + * Returns a map-like object representing a particular argument of this + * modular input kind. + * + * @param argumentName The name of the argument to retrieve. + * @return A {@code ModularInputKindArgument} object representing the given + * argument, or {@code null} if the argument does not exist. + */ + public ModularInputKindArgument getArgument(String argumentName) { + if (this.args.get(argumentName) != null) { + return new ModularInputKindArgument(this.args.get(argumentName)); + } else { + return null; + } + } + + /** + * Returns the description of this modular input kind. + * + * @return A string containing the description. + */ + public String getDescription() { + return getString("description", null); + } + + /** + * Returns the title of this modular input kind, which is also displayed in + * Splunk Web (rather than the name used in the REST API). + * + * @return A string containing the title. + */ + public String getTitle() { + return getString("title", null); + } + + /** + * Indicates whether this modular input kind has a given argument. + * + * @param argumentName The argument to look up. + * @return {@code true} if the argument exists, {@code false} if not. + */ + public boolean hasArgument(String argumentName) { + return this.args.containsKey(argumentName); + } +} diff --git a/splunk/src/main/java/com/splunk/PasswordCollection.java b/splunk/src/main/java/com/splunk/PasswordCollection.java index 47a50a22..de4faa7b 100644 --- a/splunk/src/main/java/com/splunk/PasswordCollection.java +++ b/splunk/src/main/java/com/splunk/PasswordCollection.java @@ -1,152 +1,152 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -/** - * The {@code PasswordCollection} class represents a collection of credentials. - */ -public class PasswordCollection extends EntityCollection<Password> { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - */ - PasswordCollection(Service service) { - super(service, service.passwordEndPoint, Password.class); - } - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - */ - PasswordCollection(Service service, Args args) { - super(service, service.passwordEndPoint, Password.class, args); - } - - /** - * Creates a credential with a username and password. - * - * @param name The username. - * @param password The password. - * - * @return The new credential. - */ - public Password create(String name, String password) { - if(checkForWildcards()){ - throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); - } - Args args = new Args("password", password); - return create(name, args); - } - - /** - * Creates a credential with a username, password, and realm. - * - * @param name The username. - * @param password The password. - * @param realm The credential realm. - * @return The new credential. 
- */ - public Password create(String name, String password, String realm) { - if(checkForWildcards()){ - throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); - } - Args args = new Args(); - args.put("password", password); - args.put("realm", realm); - return create(name, args); - } - - /** - * Get a credential with realm and name. - * - * @param realm The credential realm. - * @param name The username. - * @return The credential, or null if not found. - */ - public Password get(String realm, String name) { - return super.get(String.format("%s:%s:", realm, name)); - } - - @Override - public Password get(Object key) { - // Make it compatible with the old way (low-efficient) - if (key instanceof String && !((String) key).contains(":")) { - return getByUsername((String) key); - } - return super.get(key); - } - - /** - * Remove a credential with realm and name. - * - * @param realm The credential realm. - * @param name The username. - * @return The removed credential, or null if not found. 
- */ - public Password remove(String realm, String name) { - if(checkForWildcards()){ - throw new IllegalArgumentException("app context must be specified when removing a password."); - } - return super.remove(String.format("%s:%s:", realm, name)); - } - - @Override - public Password remove(String key) { - if(checkForWildcards()){ - throw new IllegalArgumentException("app context must be specified when removing a password."); - } - // Make it compatible with the old way (low-efficient) - if (!key.contains(":")) { - Password password = getByUsername((String) key); - validate(); - if (password == null) return null; - password.remove(); - // by invalidating any access to items will get refreshed - invalidate(); - return password; - } - return super.remove(key); - } - - @Override - public boolean containsKey(Object key) { - if (key instanceof String && !((String) key).contains(":")) { - return getByUsername((String) key) != null; - } - return super.containsKey(key); - } - - private Password getByUsername(String name) { - for (Password password : this.values()) { - if (password.getUsername().equals(name)) return password; - } - return null; - } - - private boolean checkForWildcards(){ - boolean isWildCard = false; - if(("-").equals(service.getOwner()) || ("-").equals(service.getApp())){ - isWildCard = true; - } - return isWildCard; - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +/** + * The {@code PasswordCollection} class represents a collection of credentials. + */ +public class PasswordCollection extends EntityCollection<Password> { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + */ + PasswordCollection(Service service) { + super(service, service.passwordEndPoint, Password.class); + } + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + */ + PasswordCollection(Service service, Args args) { + super(service, service.passwordEndPoint, Password.class, args); + } + + /** + * Creates a credential with a username and password. + * + * @param name The username. + * @param password The password. + * + * @return The new credential. + */ + public Password create(String name, String password) { + if(checkForWildcards()){ + throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); + } + Args args = new Args("password", password); + return create(name, args); + } + + /** + * Creates a credential with a username, password, and realm. + * + * @param name The username. + * @param password The password. + * @param realm The credential realm. + * @return The new credential. + */ + public Password create(String name, String password, String realm) { + if(checkForWildcards()){ + throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); + } + Args args = new Args(); + args.put("password", password); + args.put("realm", realm); + return create(name, args); + } + + /** + * Get a credential with realm and name. + * + * @param realm The credential realm. + * @param name The username. + * @return The credential, or null if not found. 
+ */ + public Password get(String realm, String name) { + return super.get(String.format("%s:%s:", realm, name)); + } + + @Override + public Password get(Object key) { + // Make it compatible with the old way (low-efficient) + if (key instanceof String keyInst && !keyInst.contains(":")) { + return getByUsername(keyInst); + } + return super.get(key); + } + + /** + * Remove a credential with realm and name. + * + * @param realm The credential realm. + * @param name The username. + * @return The removed credential, or null if not found. + */ + public Password remove(String realm, String name) { + if(checkForWildcards()){ + throw new IllegalArgumentException("app context must be specified when removing a password."); + } + return super.remove(String.format("%s:%s:", realm, name)); + } + + @Override + public Password remove(String key) { + if(checkForWildcards()){ + throw new IllegalArgumentException("app context must be specified when removing a password."); + } + // Make it compatible with the old way (low-efficient) + if (!key.contains(":")) { + Password password = getByUsername((String) key); + validate(); + if (password == null) return null; + password.remove(); + // by invalidating any access to items will get refreshed + invalidate(); + return password; + } + return super.remove(key); + } + + @Override + public boolean containsKey(Object key) { + if (key instanceof String keyInst && !keyInst.contains(":")) { + return getByUsername(keyInst) != null; + } + return super.containsKey(key); + } + + private Password getByUsername(String name) { + for (Password password : this.values()) { + if (password.getUsername().equals(name)) return password; + } + return null; + } + + private boolean checkForWildcards(){ + boolean isWildCard = false; + if(("-").equals(service.getOwner()) || ("-").equals(service.getApp())){ + isWildCard = true; + } + return isWildCard; + } +} diff --git a/splunk/src/main/java/com/splunk/PivotSpecification.java 
b/splunk/src/main/java/com/splunk/PivotSpecification.java index 1aa154a9..c9ef4156 100644 --- a/splunk/src/main/java/com/splunk/PivotSpecification.java +++ b/splunk/src/main/java/com/splunk/PivotSpecification.java @@ -1,491 +1,491 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import com.google.gson.*; - -import java.util.*; - -/** - * PivotSpecification represents a pivot to be done on a particular data model object. The user creates a - * PivotSpecification on some data model object, adds filters, row splits, column splits, and cell values, - * then calls the pivot method to query splunkd and get a set of SPL queries corresponding to this specification. 
- */ -public class PivotSpecification { - private static GsonBuilder gson = new GsonBuilder(); - - private DataModelObject dataModelObject; - private String accelerationNamespace = null; - - private List<PivotColumnSplit> columns = new ArrayList<PivotColumnSplit>(); - private List<PivotFilter> filters = new ArrayList<PivotFilter>(); - private List<PivotCellValue> cells = new ArrayList<PivotCellValue>(); - private List<PivotRowSplit> rows = new ArrayList<PivotRowSplit>(); - - PivotSpecification(DataModelObject dataModelObject) { - this.dataModelObject = dataModelObject; - if (dataModelObject.getDataModel().isAccelerated()) { - this.accelerationNamespace = dataModelObject.getDataModel().getName(); - } else { - this.accelerationNamespace = null; - } - } - - /** - * Set the namespace to use for this acceleration, usually the name of a data model. A value of null will set no - * namespace for acceleration. - * - * @param namespace a string specifying a namespace. - * @return PivotSpecification instance - */ - public PivotSpecification setAccelerationNamespace(String namespace) { - this.accelerationNamespace = namespace; - return this; - } - - /** - * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a - * DataModelObject instance, as the acceleration cache for this pivot. - * - * @param sid the SID of a job. - * @return PivotSpecification instance - */ - public PivotSpecification setAccelerationJob(String sid) { - if (sid == null) { - throw new IllegalArgumentException("Sid to use for acceleration must not be null."); - } else { - this.accelerationNamespace = "sid=" + sid; - } - return this; - } - - /** - * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a - * DataModelObject instance, as the acceleration cache for this pivot. - * - * @param job a Job object. 
- * @return PivotSpecification instance - */ - public PivotSpecification setAccelerationJob(Job job) { - setAccelerationJob(job.getSid()); - return this; - } - - /** - * @return the acceleration namespace to use in this pivot. - */ - public String getAccelerationNamespace() { - return this.accelerationNamespace; - } - - private void assertCorrectlyTypedField(String fieldName, FieldType[] acceptableTypes) { - DataModelField field = this.dataModelObject.getField(fieldName); - if (field == null) { - throw new IllegalArgumentException("No such field named " + fieldName); - } else if (!Arrays.asList(acceptableTypes).contains(field.getType())) { - StringBuilder errorMessage = new StringBuilder(); - errorMessage.append("Expected a field of one of the following types: "); - boolean first = true; - for (FieldType t : acceptableTypes) { - if (!first) errorMessage.append(", "); - errorMessage.append(t.toString()); - first = false; - } - errorMessage.append("; found type " + field.getType().toString()); - throw new IllegalArgumentException(errorMessage.toString()); - } - } - - private void assertCorrectlyTypedField(String field, FieldType acceptableType) { - assertCorrectlyTypedField(field, new FieldType[] { acceptableType }); - } - - /** - * Add a filter on a boolean valued field. The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * is_remote = false - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param compareTo the value to compare the field to - * @return the PivotSpecification you are operating on. - */ - public PivotSpecification addFilter(String field, BooleanComparison comparison, boolean compareTo) { - assertCorrectlyTypedField(field, FieldType.BOOLEAN); - - BooleanPivotFilter filter = new BooleanPivotFilter(this.dataModelObject, field, comparison, compareTo); - filters.add(filter); - - return this; - } - - /** - * Add a filter on a string valued field. 
The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * host startswith 'boris' - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param comparisonValue the value to compare the field to - * @return the PivotSpecification you are operating on. - */ - public PivotSpecification addFilter(String field, StringComparison comparison, String comparisonValue) { - assertCorrectlyTypedField(field, FieldType.STRING); - - StringPivotFilter filter = new StringPivotFilter(this.dataModelObject, field, comparison, comparisonValue); - filters.add(filter); - - return this; - } - - /** - * Add a filter on an IPv4 valued field. The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * hostip = 192.168.100.12 - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param comparisonValue the value to compare the field to - * @return the PivotSpecification you are operating on. - */ - public PivotSpecification addFilter(String field, IPv4Comparison comparison, String comparisonValue) { - assertCorrectlyTypedField(field, FieldType.IPV4); - - IPv4PivotFilter filter = new IPv4PivotFilter(this.dataModelObject, field, comparison, comparisonValue); - filters.add(filter); - - return this; - } - - /** - * Add a filter on a numeric field. The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * {@code height > 6} - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param comparisonValue the value to compare the field to - * @return the PivotSpecification you are operating on. 
- */ - public PivotSpecification addFilter(String field, NumberComparison comparison, double comparisonValue) { - assertCorrectlyTypedField(field, FieldType.NUMBER); - - NumberPivotFilter filter = new NumberPivotFilter(this.dataModelObject, field, comparison, comparisonValue); - filters.add(filter); - - return this; - } - - /** - * Add a filter that limits the number of values of an aggregated field that will be allowed - * into the pivot. - * - * @param field the name of a field - * @param sortAttribute field to aggregate for limiting - * @param sortDirection whether to take the lowest or highest values of the aggregated field - * @param limit how many values of the aggregated field to take - * @param statsFunction the function to use for aggregation - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addFilter(String field, String sortAttribute, - SortDirection sortDirection, int limit, StatsFunction statsFunction) { - if (!dataModelObject.containsField(field)) { - throw new IllegalArgumentException("No such field " + sortAttribute); - } - assertCorrectlyTypedField( - sortAttribute, - new FieldType[] { FieldType.STRING, FieldType.NUMBER, FieldType.OBJECTCOUNT } - ); - - LimitPivotFilter filter = new LimitPivotFilter(this.dataModelObject, field, sortAttribute, - sortDirection, limit, statsFunction); - filters.add(filter); - - return this; - } - - /** - * Add a row split on a numeric or string valued field, splitting on each distinct value of the field. - * - * @param field name of the field to split on - * @param label a human readable name for this set of rows - * @return The PivotSpecification you are modifying. 
- */ - public PivotSpecification addRowSplit(String field, String label) { - assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); - - FieldType t = this.dataModelObject.getField(field).getType(); - if (t == FieldType.NUMBER) { - rows.add(new NumberPivotRowSplit(this.dataModelObject, field, label)); - } else if (t == FieldType.STRING) { - rows.add(new StringPivotRowSplit(this.dataModelObject, field, label)); - } else { - throw new IllegalArgumentException("Field not of type number or string despite precondition asserting so."); - } - - return this; - } - - /** - * Add a row split on a numeric field, splitting into numeric ranges. - * - * This split generates bins with edges equivalent to the - * classic loop {@code 'for i in <start> to <end> by <step>' } but with a maximum - * number of bins {@code <limit> }. This dispatches to the stats and xyseries search commands. - * See their documentation for more details. - * - * - * @param field The field to split on - * @param label a human readable name for this set of rows - * @param start the value of the start of the first range, or null to take the lowest value in the events. - * @param end the value for the end of the last range, or null to take the highest value in the events. - * @param step the width of each range, or null to have Splunk calculate it. - * @param limit the maximum number of ranges to split into, or null for no limit. - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addRowSplit(String field, String label, Integer start, Integer end, - Integer step, Integer limit) { - assertCorrectlyTypedField(field, FieldType.NUMBER); - - PivotRowSplit split = new RangePivotRowSplit(this.dataModelObject, field, label, start, end, step, limit); - rows.add(split); - - return this; - } - - /** - * Add a row split on a boolean valued field. 
- * - * @param field String value - * @param label String value - * @param trueDisplayValue the string to display in the true valued row label. - * @param falseDisplayValue the string to display in the false valued row label; - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addRowSplit(String field, String label, - String trueDisplayValue, String falseDisplayValue) { - assertCorrectlyTypedField(field, FieldType.BOOLEAN); - - PivotRowSplit split = new BooleanPivotRowSplit(this.dataModelObject, field, label, - trueDisplayValue, falseDisplayValue); - rows.add(split); - - return this; - } - - /** - * Add a row split on a timestamp valued field, binned by the specified bucket size. - * - * @param field the name of the field to split on. - * @param label a human readable name for this set of rows - * @param binning the size of bins to use - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addRowSplit(String field, String label, TimestampBinning binning) { - assertCorrectlyTypedField(field, FieldType.TIMESTAMP); - - PivotRowSplit split = new TimestampPivotRowSplit(this.dataModelObject, field, label, binning); - rows.add(split); - - return this; - } - - /** - * Add a column split on a string or number valued field, producing a column for - * each distinct value of the field. - * - * @param field the field to split on. - * @return The PivotSpecification you are modifying. 
- */ - public PivotSpecification addColumnSplit(String field) { - assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); - - FieldType t = this.dataModelObject.getField(field).getType(); - - if (t == FieldType.NUMBER) { - columns.add(new NumericPivotColumnSplit(this.dataModelObject, field)); - } else if (t == FieldType.STRING) { - columns.add(new StringPivotColumnSplit(this.dataModelObject, field)); - } - - return this; - } - - /** - * Add a column split on a numeric field, splitting the values into ranges. - * - * @param field the field to split on. - * @param start the value of the start of the first range, or null to take the lowest value in the events. - * @param end the value for the end of the last range, or null to take the highest value in the events. - * @param step the width of each range, or null to have Splunk calculate it. - * @param limit the maximum number of ranges to split into, or null for no limit. - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addColumnSplit(String field, Integer start, Integer end, Integer step, Integer limit) { - assertCorrectlyTypedField(field, FieldType.NUMBER); - - PivotColumnSplit split = new RangePivotColumnSplit(this.dataModelObject, field, start, end, step, limit); - - columns.add(split); - return this; - } - - /** - * Add a column split on a boolean valued field. - * - * @param field the field to split on. - * @param trueDisplayValue the string to display in the true valued column label. - * @param falseDisplayValue the string to display in the false valued column label. - * @return the PivotSpecification you are working on. 
- */ - public PivotSpecification addColumnSplit(String field, String trueDisplayValue, String falseDisplayValue) { - assertCorrectlyTypedField(field, FieldType.BOOLEAN); - - PivotColumnSplit split = new BooleanPivotColumnSplit(this.dataModelObject, field, - trueDisplayValue, falseDisplayValue); - - columns.add(split); - return this; - } - - /** - * Add a column split on a timestamp valued field. - * - * @param field the field to split on. - * @param binning what time periods to use for binning valued of the field. - * @return the PivotSpecification you are working on. - */ - public PivotSpecification addColumnSplit(String field, TimestampBinning binning) { - assertCorrectlyTypedField(field, FieldType.TIMESTAMP); - - PivotColumnSplit split = new TimestampPivotColumnSplit(this.dataModelObject, field, binning); - - columns.add(split); - return this; - } - - /** - * Add an aggregate to each cell of the pivot. - * - * @param field the field to aggregate. - * @param label a human readable name for this aggregate. - * @param statsFunction the function to use for aggregation. - * @return the PivotSpecification you are working on. - */ - public PivotSpecification addCellValue(String field, String label, StatsFunction statsFunction) { - cells.add(new PivotCellValue(this.dataModelObject, field, label, statsFunction)); - - return this; - } - - /** - * @return a JSON serialization of this object. 
- */ - JsonObject toJson() { - JsonObject root = new JsonObject(); - - root.addProperty("dataModel", this.dataModelObject.getDataModel().getName()); - root.addProperty("baseClass", this.dataModelObject.getName()); - - JsonArray filterArray = new JsonArray(); - for (PivotFilter p : filters) { filterArray.add(p.toJson()); } - root.add("filters", filterArray); - - JsonArray rowsplitArray = new JsonArray(); - for (PivotRowSplit p : rows) { rowsplitArray.add(p.toJson()); } - root.add("rows", rowsplitArray); - - JsonArray cellvalueArray = new JsonArray(); - for (PivotCellValue p : cells) { cellvalueArray.add(p.toJson()); } - root.add("cells", cellvalueArray); - - JsonArray columnsplitArray = new JsonArray(); - for (PivotColumnSplit p : columns) { columnsplitArray.add(p.toJson()); } - root.add("columns", columnsplitArray); - - return root; - } - - /** - * @return a collection of all the filters added to this PivotSpecification. - */ - public Collection<PivotFilter> getFilters() { - return Collections.unmodifiableCollection(this.filters); - } - - /** - * @return a collection of all the row splits added to this PivotSpecification. - */ - public Collection<PivotRowSplit> getRowSplits() { - return Collections.unmodifiableCollection(this.rows); - } - - /** - * @return a collection of all the column splits added to this PivotSpecification. - */ - public Collection<PivotColumnSplit> getColumnSplits() { - return Collections.unmodifiableCollection(this.columns); - } - - /** - * @return a collection of all the cell values added to this PivotSpecification. - */ - public Collection<PivotCellValue> getCellValues() { - return Collections.unmodifiableCollection(this.cells); - } - - /** - * Query Splunk for SPL queries corresponding to this pivot. - * - * @return a Pivot object encapsulating the returned queries. 
- */ - public Pivot pivot() { - Service service = this.dataModelObject.getDataModel().getService(); - - Args args = new Args(); - args.put("pivot_json", toJson()); - if (this.accelerationNamespace != null) { - args.put("namespace", this.accelerationNamespace); - } - - ResponseMessage response = service.get( - "datamodel/pivot/" + this.dataModelObject.getDataModel().getName(), - args - ); - - if (response.getStatus() != 200) { - throw HttpException.create(response); - } else { - return Pivot.parseStream(service, response.getContent()); - } - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.splunk; + +import com.google.gson.*; + +import java.util.*; + +/** + * PivotSpecification represents a pivot to be done on a particular data model object. The user creates a + * PivotSpecification on some data model object, adds filters, row splits, column splits, and cell values, + * then calls the pivot method to query splunkd and get a set of SPL queries corresponding to this specification. 
+ */ +public class PivotSpecification { + private static GsonBuilder gson = new GsonBuilder(); + + private DataModelObject dataModelObject; + private String accelerationNamespace = null; + + private List<PivotColumnSplit> columns = new ArrayList<>(); + private List<PivotFilter> filters = new ArrayList<>(); + private List<PivotCellValue> cells = new ArrayList<>(); + private List<PivotRowSplit> rows = new ArrayList<>(); + + PivotSpecification(DataModelObject dataModelObject) { + this.dataModelObject = dataModelObject; + if (dataModelObject.getDataModel().isAccelerated()) { + this.accelerationNamespace = dataModelObject.getDataModel().getName(); + } else { + this.accelerationNamespace = null; + } + } + + /** + * Set the namespace to use for this acceleration, usually the name of a data model. A value of null will set no + * namespace for acceleration. + * + * @param namespace a string specifying a namespace. + * @return PivotSpecification instance + */ + public PivotSpecification setAccelerationNamespace(String namespace) { + this.accelerationNamespace = namespace; + return this; + } + + /** + * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a + * DataModelObject instance, as the acceleration cache for this pivot. + * + * @param sid the SID of a job. + * @return PivotSpecification instance + */ + public PivotSpecification setAccelerationJob(String sid) { + if (sid == null) { + throw new IllegalArgumentException("Sid to use for acceleration must not be null."); + } else { + this.accelerationNamespace = "sid=" + sid; + } + return this; + } + + /** + * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a + * DataModelObject instance, as the acceleration cache for this pivot. + * + * @param job a Job object. 
+ * @return PivotSpecification instance + */ + public PivotSpecification setAccelerationJob(Job job) { + setAccelerationJob(job.getSid()); + return this; + } + + /** + * @return the acceleration namespace to use in this pivot. + */ + public String getAccelerationNamespace() { + return this.accelerationNamespace; + } + + private void assertCorrectlyTypedField(String fieldName, FieldType[] acceptableTypes) { + DataModelField field = this.dataModelObject.getField(fieldName); + if (field == null) { + throw new IllegalArgumentException("No such field named " + fieldName); + } else if (!Arrays.asList(acceptableTypes).contains(field.getType())) { + StringBuilder errorMessage = new StringBuilder(); + errorMessage.append("Expected a field of one of the following types: "); + boolean first = true; + for (FieldType t : acceptableTypes) { + if (!first) errorMessage.append(", "); + errorMessage.append(t.toString()); + first = false; + } + errorMessage.append("; found type " + field.getType().toString()); + throw new IllegalArgumentException(errorMessage.toString()); + } + } + + private void assertCorrectlyTypedField(String field, FieldType acceptableType) { + assertCorrectlyTypedField(field, new FieldType[] { acceptableType }); + } + + /** + * Add a filter on a boolean valued field. The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * is_remote = false + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param compareTo the value to compare the field to + * @return the PivotSpecification you are operating on. + */ + public PivotSpecification addFilter(String field, BooleanComparison comparison, boolean compareTo) { + assertCorrectlyTypedField(field, FieldType.BOOLEAN); + + BooleanPivotFilter filter = new BooleanPivotFilter(this.dataModelObject, field, comparison, compareTo); + filters.add(filter); + + return this; + } + + /** + * Add a filter on a string valued field. 
The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * host startswith 'boris' + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param comparisonValue the value to compare the field to + * @return the PivotSpecification you are operating on. + */ + public PivotSpecification addFilter(String field, StringComparison comparison, String comparisonValue) { + assertCorrectlyTypedField(field, FieldType.STRING); + + StringPivotFilter filter = new StringPivotFilter(this.dataModelObject, field, comparison, comparisonValue); + filters.add(filter); + + return this; + } + + /** + * Add a filter on an IPv4 valued field. The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * hostip = 192.168.100.12 + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param comparisonValue the value to compare the field to + * @return the PivotSpecification you are operating on. + */ + public PivotSpecification addFilter(String field, IPv4Comparison comparison, String comparisonValue) { + assertCorrectlyTypedField(field, FieldType.IPV4); + + IPv4PivotFilter filter = new IPv4PivotFilter(this.dataModelObject, field, comparison, comparisonValue); + filters.add(filter); + + return this; + } + + /** + * Add a filter on a numeric field. The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * {@code height > 6} + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param comparisonValue the value to compare the field to + * @return the PivotSpecification you are operating on. 
+ */ + public PivotSpecification addFilter(String field, NumberComparison comparison, double comparisonValue) { + assertCorrectlyTypedField(field, FieldType.NUMBER); + + NumberPivotFilter filter = new NumberPivotFilter(this.dataModelObject, field, comparison, comparisonValue); + filters.add(filter); + + return this; + } + + /** + * Add a filter that limits the number of values of an aggregated field that will be allowed + * into the pivot. + * + * @param field the name of a field + * @param sortAttribute field to aggregate for limiting + * @param sortDirection whether to take the lowest or highest values of the aggregated field + * @param limit how many values of the aggregated field to take + * @param statsFunction the function to use for aggregation + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addFilter(String field, String sortAttribute, + SortDirection sortDirection, int limit, StatsFunction statsFunction) { + if (!dataModelObject.containsField(field)) { + throw new IllegalArgumentException("No such field " + sortAttribute); + } + assertCorrectlyTypedField( + sortAttribute, + new FieldType[] { FieldType.STRING, FieldType.NUMBER, FieldType.OBJECTCOUNT } + ); + + LimitPivotFilter filter = new LimitPivotFilter(this.dataModelObject, field, sortAttribute, + sortDirection, limit, statsFunction); + filters.add(filter); + + return this; + } + + /** + * Add a row split on a numeric or string valued field, splitting on each distinct value of the field. + * + * @param field name of the field to split on + * @param label a human readable name for this set of rows + * @return The PivotSpecification you are modifying. 
+ */ + public PivotSpecification addRowSplit(String field, String label) { + assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); + + FieldType t = this.dataModelObject.getField(field).getType(); + if (t == FieldType.NUMBER) { + rows.add(new NumberPivotRowSplit(this.dataModelObject, field, label)); + } else if (t == FieldType.STRING) { + rows.add(new StringPivotRowSplit(this.dataModelObject, field, label)); + } else { + throw new IllegalArgumentException("Field not of type number or string despite precondition asserting so."); + } + + return this; + } + + /** + * Add a row split on a numeric field, splitting into numeric ranges. + * + * This split generates bins with edges equivalent to the + * classic loop {@code 'for i in <start> to <end> by <step>' } but with a maximum + * number of bins {@code <limit> }. This dispatches to the stats and xyseries search commands. + * See their documentation for more details. + * + * + * @param field The field to split on + * @param label a human readable name for this set of rows + * @param start the value of the start of the first range, or null to take the lowest value in the events. + * @param end the value for the end of the last range, or null to take the highest value in the events. + * @param step the width of each range, or null to have Splunk calculate it. + * @param limit the maximum number of ranges to split into, or null for no limit. + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addRowSplit(String field, String label, Integer start, Integer end, + Integer step, Integer limit) { + assertCorrectlyTypedField(field, FieldType.NUMBER); + + PivotRowSplit split = new RangePivotRowSplit(this.dataModelObject, field, label, start, end, step, limit); + rows.add(split); + + return this; + } + + /** + * Add a row split on a boolean valued field. 
+ * + * @param field String value + * @param label String value + * @param trueDisplayValue the string to display in the true valued row label. + * @param falseDisplayValue the string to display in the false valued row label; + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addRowSplit(String field, String label, + String trueDisplayValue, String falseDisplayValue) { + assertCorrectlyTypedField(field, FieldType.BOOLEAN); + + PivotRowSplit split = new BooleanPivotRowSplit(this.dataModelObject, field, label, + trueDisplayValue, falseDisplayValue); + rows.add(split); + + return this; + } + + /** + * Add a row split on a timestamp valued field, binned by the specified bucket size. + * + * @param field the name of the field to split on. + * @param label a human readable name for this set of rows + * @param binning the size of bins to use + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addRowSplit(String field, String label, TimestampBinning binning) { + assertCorrectlyTypedField(field, FieldType.TIMESTAMP); + + PivotRowSplit split = new TimestampPivotRowSplit(this.dataModelObject, field, label, binning); + rows.add(split); + + return this; + } + + /** + * Add a column split on a string or number valued field, producing a column for + * each distinct value of the field. + * + * @param field the field to split on. + * @return The PivotSpecification you are modifying. 
+ */ + public PivotSpecification addColumnSplit(String field) { + assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); + + FieldType t = this.dataModelObject.getField(field).getType(); + + if (t == FieldType.NUMBER) { + columns.add(new NumericPivotColumnSplit(this.dataModelObject, field)); + } else if (t == FieldType.STRING) { + columns.add(new StringPivotColumnSplit(this.dataModelObject, field)); + } + + return this; + } + + /** + * Add a column split on a numeric field, splitting the values into ranges. + * + * @param field the field to split on. + * @param start the value of the start of the first range, or null to take the lowest value in the events. + * @param end the value for the end of the last range, or null to take the highest value in the events. + * @param step the width of each range, or null to have Splunk calculate it. + * @param limit the maximum number of ranges to split into, or null for no limit. + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addColumnSplit(String field, Integer start, Integer end, Integer step, Integer limit) { + assertCorrectlyTypedField(field, FieldType.NUMBER); + + PivotColumnSplit split = new RangePivotColumnSplit(this.dataModelObject, field, start, end, step, limit); + + columns.add(split); + return this; + } + + /** + * Add a column split on a boolean valued field. + * + * @param field the field to split on. + * @param trueDisplayValue the string to display in the true valued column label. + * @param falseDisplayValue the string to display in the false valued column label. + * @return the PivotSpecification you are working on. 
+ */ + public PivotSpecification addColumnSplit(String field, String trueDisplayValue, String falseDisplayValue) { + assertCorrectlyTypedField(field, FieldType.BOOLEAN); + + PivotColumnSplit split = new BooleanPivotColumnSplit(this.dataModelObject, field, + trueDisplayValue, falseDisplayValue); + + columns.add(split); + return this; + } + + /** + * Add a column split on a timestamp valued field. + * + * @param field the field to split on. + * @param binning what time periods to use for binning valued of the field. + * @return the PivotSpecification you are working on. + */ + public PivotSpecification addColumnSplit(String field, TimestampBinning binning) { + assertCorrectlyTypedField(field, FieldType.TIMESTAMP); + + PivotColumnSplit split = new TimestampPivotColumnSplit(this.dataModelObject, field, binning); + + columns.add(split); + return this; + } + + /** + * Add an aggregate to each cell of the pivot. + * + * @param field the field to aggregate. + * @param label a human readable name for this aggregate. + * @param statsFunction the function to use for aggregation. + * @return the PivotSpecification you are working on. + */ + public PivotSpecification addCellValue(String field, String label, StatsFunction statsFunction) { + cells.add(new PivotCellValue(this.dataModelObject, field, label, statsFunction)); + + return this; + } + + /** + * @return a JSON serialization of this object. 
+ */ + JsonObject toJson() { + JsonObject root = new JsonObject(); + + root.addProperty("dataModel", this.dataModelObject.getDataModel().getName()); + root.addProperty("baseClass", this.dataModelObject.getName()); + + JsonArray filterArray = new JsonArray(); + for (PivotFilter p : filters) { filterArray.add(p.toJson()); } + root.add("filters", filterArray); + + JsonArray rowsplitArray = new JsonArray(); + for (PivotRowSplit p : rows) { rowsplitArray.add(p.toJson()); } + root.add("rows", rowsplitArray); + + JsonArray cellvalueArray = new JsonArray(); + for (PivotCellValue p : cells) { cellvalueArray.add(p.toJson()); } + root.add("cells", cellvalueArray); + + JsonArray columnsplitArray = new JsonArray(); + for (PivotColumnSplit p : columns) { columnsplitArray.add(p.toJson()); } + root.add("columns", columnsplitArray); + + return root; + } + + /** + * @return a collection of all the filters added to this PivotSpecification. + */ + public Collection<PivotFilter> getFilters() { + return Collections.unmodifiableCollection(this.filters); + } + + /** + * @return a collection of all the row splits added to this PivotSpecification. + */ + public Collection<PivotRowSplit> getRowSplits() { + return Collections.unmodifiableCollection(this.rows); + } + + /** + * @return a collection of all the column splits added to this PivotSpecification. + */ + public Collection<PivotColumnSplit> getColumnSplits() { + return Collections.unmodifiableCollection(this.columns); + } + + /** + * @return a collection of all the cell values added to this PivotSpecification. + */ + public Collection<PivotCellValue> getCellValues() { + return Collections.unmodifiableCollection(this.cells); + } + + /** + * Query Splunk for SPL queries corresponding to this pivot. + * + * @return a Pivot object encapsulating the returned queries. 
+ */ + public Pivot pivot() { + Service service = this.dataModelObject.getDataModel().getService(); + + Args args = new Args(); + args.put("pivot_json", toJson()); + if (this.accelerationNamespace != null) { + args.put("namespace", this.accelerationNamespace); + } + + ResponseMessage response = service.get( + "datamodel/pivot/" + this.dataModelObject.getDataModel().getName(), + args + ); + + if (response.getStatus() != 200) { + throw HttpException.create(response); + } else { + return Pivot.parseStream(service, response.getContent()); + } + } +} diff --git a/splunk/src/main/java/com/splunk/RequestMessage.java b/splunk/src/main/java/com/splunk/RequestMessage.java index 816e1fe2..27dda740 100644 --- a/splunk/src/main/java/com/splunk/RequestMessage.java +++ b/splunk/src/main/java/com/splunk/RequestMessage.java @@ -1,114 +1,114 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.OutputStream; -import java.util.Map; -import java.util.TreeMap; - -/** - * The {@code RequestMessage} class represents an HTTP request message including - * method, headers, and body content. - */ -public class RequestMessage { - String method = "GET"; // "GET" | "PUT" | "POST" | "DELETE" - Map<String, String> header = null; - Object content = null; - - /** Creates a new {@code RequestMessage} instance. 
*/ - public RequestMessage() {} - - /** - * Creates a new {@code RequestMessage} instance with a given method - * - * @param method String value - */ - public RequestMessage(String method) { - this.method = method; - } - - /** - * Indicates whether the given value is a supported HTTP method. - * - * @param value The value to check. - * @return {@code true} if the value is a supported method, - * {@code false} if not. - */ - boolean checkMethod(String value) { - return - value.equalsIgnoreCase("GET") || - value.equalsIgnoreCase("PUT") || - value.equalsIgnoreCase("POST") || - value.equalsIgnoreCase("DELETE"); - } - - /** - * Returns a map of message headers. - * - * @return A {@code Map} of message headers. - */ - public Map<String, String> getHeader() { - if (this.header == null) - this.header = new TreeMap<String, String>( - String.CASE_INSENSITIVE_ORDER); - return this.header; - } - - /** - * Returns the message's HTTP method. - * - * @return The HTTP method. - */ - public String getMethod() { - return this.method; - } - - /** - * Sets the message's HTTP method. - * - * @param value The HTTP method. - */ - public void setMethod(String value) { - value = value.toUpperCase(); - if (!checkMethod(value)) - throw new IllegalArgumentException(); - this.method = value; - } - - /** - * Returns the message body content. - * - * @return The message content. - */ - public Object getContent() { - return this.content; - } - - /** - * Sets the message body content. - * - * @param value The message content. - */ - public void setContent(String value) { - this.content = value; - } - - public void setContent(OutputStream value) { - this.content = value; - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.OutputStream; +import java.util.Map; +import java.util.TreeMap; + +/** + * The {@code RequestMessage} class represents an HTTP request message including + * method, headers, and body content. + */ +public class RequestMessage { + String method = "GET"; // "GET" | "PUT" | "POST" | "DELETE" + Map<String, String> header = null; + Object content = null; + + /** Creates a new {@code RequestMessage} instance. */ + public RequestMessage() {} + + /** + * Creates a new {@code RequestMessage} instance with a given method + * + * @param method String value + */ + public RequestMessage(String method) { + this.method = method; + } + + /** + * Indicates whether the given value is a supported HTTP method. + * + * @param value The value to check. + * @return {@code true} if the value is a supported method, + * {@code false} if not. + */ + boolean checkMethod(String value) { + return + value.equalsIgnoreCase("GET") || + value.equalsIgnoreCase("PUT") || + value.equalsIgnoreCase("POST") || + value.equalsIgnoreCase("DELETE"); + } + + /** + * Returns a map of message headers. + * + * @return A {@code Map} of message headers. + */ + public Map<String, String> getHeader() { + if (this.header == null) + this.header = new TreeMap<>( + String.CASE_INSENSITIVE_ORDER); + return this.header; + } + + /** + * Returns the message's HTTP method. + * + * @return The HTTP method. + */ + public String getMethod() { + return this.method; + } + + /** + * Sets the message's HTTP method. + * + * @param value The HTTP method. 
+ */ + public void setMethod(String value) { + value = value.toUpperCase(); + if (!checkMethod(value)) + throw new IllegalArgumentException(); + this.method = value; + } + + /** + * Returns the message body content. + * + * @return The message content. + */ + public Object getContent() { + return this.content; + } + + /** + * Sets the message body content. + * + * @param value The message content. + */ + public void setContent(String value) { + this.content = value; + } + + public void setContent(OutputStream value) { + this.content = value; + } +} + diff --git a/splunk/src/main/java/com/splunk/ResourceCollection.java b/splunk/src/main/java/com/splunk/ResourceCollection.java index b81ecb8a..b1f076f9 100644 --- a/splunk/src/main/java/com/splunk/ResourceCollection.java +++ b/splunk/src/main/java/com/splunk/ResourceCollection.java @@ -1,386 +1,386 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.util.*; - -/** - * The {@code ResourceCollection} abstract base class represents a collection of - * Splunk resources. - * - * @param <T> The type of members of the collection. 
- */ -public class ResourceCollection<T extends Resource> - extends Resource implements Map<String, T> -{ - protected LinkedHashMap<String, LinkedList<T>> - items = new LinkedHashMap<String, LinkedList<T>>(); - protected Class itemClass; - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The target endpoint. - * @param itemClass The class of this resource item. - */ - ResourceCollection(Service service, String path, Class itemClass) { - super(service, path); - this.itemClass = itemClass; - } - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The target endpoint. - * @param itemClass The class of this resource item. - * @param args Collection arguments that specify the number of entities to - * return and how to sort them (see {@link CollectionArgs}). - */ - ResourceCollection( - Service service, String path, Class itemClass, Args args) { - super(service, path, args); - this.itemClass = itemClass; - } - - /** {@inheritDoc} */ - public void clear() { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public boolean containsKey(Object key) { - return validate().items.containsKey(key); - } - - /** - * Determines whether a scoped, namespace-constrained key - * exists within this collection. - * - * @param key The key to look up. - * @param namespace The namespace to constrain the search to. - * @return {@code true} if the key exists, {@code false} if not. 
- */ - public boolean containsKey(Object key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - validate(); - - LinkedList<T> entities = items.get(key); - if (entities == null || entities.size() == 0) return false; - String pathMatcher = service.fullpath("", namespace); - for (T entity: entities) { - if (entity.path.startsWith(pathMatcher)) { - return true; - } - } - return false; - } - - /** {@inheritDoc} */ - public boolean containsValue(Object value) { - // value should be a non-linked-list value; values are stored as linked - // lists inside our container. - LinkedList<Object> linkedList = new LinkedList<Object>(); - linkedList.add(value); - return validate().items.containsValue(linkedList); - } - - static Class[] itemSig = new Class[] { Service.class, String.class }; - - /** - * Creates a collection member. - * - * @param itemClass The class of the member to create. - * @param path The path to the member resource. - * @param namespace The namespace. - * @return The new member. - */ - protected T createItem(Class itemClass, String path, Args namespace) { - Constructor constructor; - try { - constructor = itemClass.getDeclaredConstructor(itemSig); - } - catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - - T item; - try { - while (true) { - Object obj = constructor.newInstance(service, service.fullpath(path, namespace)); - //if (obj instanceof Message) { // We ignore messages sent back inline. - // continue; - //} else { - item = (T)obj; - break; - //} - } - } - catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - catch (InvocationTargetException e) { - throw new RuntimeException(e.getTargetException()); - } - catch (InstantiationException e) { - throw new RuntimeException(e); - } - - return item; - } - - /** - * Creates a collection member corresponding to a given - * Atom entry. This base implementation uses the class object that was - * passed in when the generic {@code ResourceCollection} was created. 
- * Subclasses may override this method to provide alternative means of - * instantiating collection members. - * - * @param entry The {@code AtomEntry} corresponding to the member to - * instantiate. - * @return The new member. - */ - protected T createItem(AtomEntry entry) { - return createItem(itemClass, itemPath(entry), namespace(entry)); - } - - /** {@inheritDoc} */ - public Set<Map.Entry<String, T>> entrySet() { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - @Override public boolean equals(Object o) { - return validate().items.equals(o); - } - - /** - * Gets the value of a given key, if it exists within this collection. - * - * @param key The key to look up. - * @return The value indexed by the key, or {@code null} if it doesn't - * exist. - * @throws SplunkException The exception to throw if there is more than one - * value represented by this key. - */ - public T get(Object key) { - validate(); - LinkedList<T> entities = items.get(key); - if (entities != null && entities.size() > 1) { - throw new SplunkException(SplunkException.AMBIGUOUS, - "Key has multiple values, specify a namespace"); - } - if (entities == null || entities.size() == 0) return null; - return entities.get(0); - } - - /** - * Gets a the value of a scoped, namespace-constrained key, if it exists - * within this collection. - * - * @param key The key to look up. - * @param namespace The namespace to constrain the search to. - * @return The value indexed by the key, or {@code null} if it doesn't - * exist. 
- */ - public T get(Object key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - validate(); - - LinkedList<T> entities = items.get(key); - if (entities == null || entities.size() == 0) return null; - String pathMatcher = service.fullpath("", namespace); - for (T entity: entities) { - if (entity.path.startsWith(pathMatcher)) { - return entity; - } - } - return null; - } - - @Override public int hashCode() { - return validate().items.hashCode(); - } - - /** {@inheritDoc} */ - public boolean isEmpty() { - return validate().items.isEmpty(); - } - - /** - * Returns the value to use as the key from a given Atom entry. - * Subclasses may override this value for collections that use something - * other than "title" as the key. - * - * @param entry The {@code AtomEntry} corresponding to the collection - * member. - * @return The value to use as the member's key. - */ - protected String itemKey(AtomEntry entry) { - return entry.title; - } - - /** - * Returns the value to use as the member's path from a given Atom entry. - * Subclasses may override this value to support alternative methods of - * determining a member's path. - * - * @param entry The {@code AtomEntry} corresponding to the collection - * member. - * @return The value to use as the member's path. - */ - protected String itemPath(AtomEntry entry) { - return entry.links.get("alternate"); - } - - private Args namespace(AtomEntry entry) { - Args namespace = new Args(); - - // no content? return an empty namespace. 
- if (entry.content == null) - return namespace; - - HashMap<String, String> entityMetadata = - (HashMap<String, String>)entry.content.get("eai:acl"); - - // If there is no ACL info, we just create an empty map - if (entityMetadata == null) { - entityMetadata = new HashMap<String, String>(); - } - - if (entityMetadata.containsKey("owner")) - namespace.put("owner", entityMetadata.get("owner")); - if (entityMetadata.containsKey("app")) - namespace.put("app", entityMetadata.get("app")); - if (entityMetadata.containsKey("sharing")) - namespace.put("sharing", entityMetadata.get("sharing")); - return namespace; - } - - /** {@inheritDoc} */ - public Set<String> keySet() { - return validate().items.keySet(); - } - - /** - * Issues an HTTP request to list the contents of the collection resource. - * - * @return The list response message. - */ - public ResponseMessage list() { - return service.get(path, this.refreshArgs); - } - - /** - * Loads the collection resource from a given Atom feed. - * - * @param value The {@code AtomFeed} instance to load the collection from. - * @return The current {@code ResourceCollection} instance. - */ - ResourceCollection<T> load(AtomFeed value) { - super.load(value); - for (AtomEntry entry : value.entries) { - String key = itemKey(entry); - T item = createItem(entry); - if (items.containsKey(key)) { - LinkedList<T> list = items.get(key); - list.add(item); - } else { - LinkedList<T> list = new LinkedList<T>(); - list.add(item); - items.put(key, list); - } - } - return this; - } - - /** {@inheritDoc} */ - public T put(String key, T value) { - throw new UnsupportedOperationException(); - } - - /** - * Copies all mappings from a given map to this map (unsupported). - * - * @param map The set of mappings to copy into this map. - */ - public void putAll(Map<? extends String, ? 
extends T> map) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - @Override public ResourceCollection refresh() { - items.clear(); - ResponseMessage response = list(); - assert(response.getStatus() == 200); - - AtomFeed feed = null; - try { - feed = AtomFeed.parseStream(response.getContent()); - } catch (Exception e) { - throw new RuntimeException(e); - } - load(feed); - return this; - } - - /** {@inheritDoc} */ - public T remove(Object key) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public int size() { - return validate().items.size(); - } - - /** {@inheritDoc} */ - @Override public ResourceCollection<T> validate() { - super.validate(); - return this; - } - - /** {@inheritDoc} */ - public Collection<T> values() { - LinkedList<T> collection = new LinkedList<T>(); - validate(); - Set<String> keySet = items.keySet(); - for (String key: keySet) { - LinkedList<T> list = items.get(key); - for (T item: list) { - collection.add(item); - } - } - return collection; - } - - /** - * Returns the number of values that a specific key represents. - * - * @param key The key to look up. - * @return The number of entity values represented by the key. - */ - public int valueSize(Object key) { - validate(); - LinkedList<T> entities = items.get(key); - if (entities == null || entities.size() == 0) return 0; - return entities.size(); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.*; + +/** + * The {@code ResourceCollection} abstract base class represents a collection of + * Splunk resources. + * + * @param <T> The type of members of the collection. + */ +public class ResourceCollection<T extends Resource> + extends Resource implements Map<String, T> +{ + protected LinkedHashMap<String, LinkedList<T>> + items = new LinkedHashMap<>(); + protected Class itemClass; + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The target endpoint. + * @param itemClass The class of this resource item. + */ + ResourceCollection(Service service, String path, Class itemClass) { + super(service, path); + this.itemClass = itemClass; + } + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The target endpoint. + * @param itemClass The class of this resource item. + * @param args Collection arguments that specify the number of entities to + * return and how to sort them (see {@link CollectionArgs}). + */ + ResourceCollection( + Service service, String path, Class itemClass, Args args) { + super(service, path, args); + this.itemClass = itemClass; + } + + /** {@inheritDoc} */ + public void clear() { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public boolean containsKey(Object key) { + return validate().items.containsKey(key); + } + + /** + * Determines whether a scoped, namespace-constrained key + * exists within this collection. + * + * @param key The key to look up. + * @param namespace The namespace to constrain the search to. + * @return {@code true} if the key exists, {@code false} if not. 
+ */ + public boolean containsKey(Object key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + validate(); + + LinkedList<T> entities = items.get(key); + if (entities == null || entities.size() == 0) return false; + String pathMatcher = service.fullpath("", namespace); + for (T entity: entities) { + if (entity.path.startsWith(pathMatcher)) { + return true; + } + } + return false; + } + + /** {@inheritDoc} */ + public boolean containsValue(Object value) { + // value should be a non-linked-list value; values are stored as linked + // lists inside our container. + LinkedList<Object> linkedList = new LinkedList<>(); + linkedList.add(value); + return validate().items.containsValue(linkedList); + } + + static Class[] itemSig = new Class[] { Service.class, String.class }; + + /** + * Creates a collection member. + * + * @param itemClass The class of the member to create. + * @param path The path to the member resource. + * @param namespace The namespace. + * @return The new member. + */ + protected T createItem(Class itemClass, String path, Args namespace) { + Constructor constructor; + try { + constructor = itemClass.getDeclaredConstructor(itemSig); + } + catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + + T item; + try { + while (true) { + Object obj = constructor.newInstance(service, service.fullpath(path, namespace)); + //if (obj instanceof Message) { // We ignore messages sent back inline. + // continue; + //} else { + item = (T)obj; + break; + //} + } + } + catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + catch (InvocationTargetException e) { + throw new RuntimeException(e.getTargetException()); + } + catch (InstantiationException e) { + throw new RuntimeException(e); + } + + return item; + } + + /** + * Creates a collection member corresponding to a given + * Atom entry. This base implementation uses the class object that was + * passed in when the generic {@code ResourceCollection} was created. 
+ * Subclasses may override this method to provide alternative means of
+ * instantiating collection members.
+ *
+ * @param entry The {@code AtomEntry} corresponding to the member to
+ * instantiate.
+ * @return The new member.
+ */
+ protected T createItem(AtomEntry entry) {
+ return createItem(itemClass, itemPath(entry), namespace(entry));
+ }
+
+ /** {@inheritDoc} */
+ public Set<Map.Entry<String, T>> entrySet() {
+ throw new UnsupportedOperationException();
+ }
+
+ /** {@inheritDoc} */
+ @Override public boolean equals(Object o) {
+ return validate().items.equals(o);
+ }
+
+ /**
+ * Gets the value of a given key, if it exists within this collection.
+ *
+ * @param key The key to look up.
+ * @return The value indexed by the key, or {@code null} if it doesn't
+ * exist.
+ * @throws SplunkException The exception to throw if there is more than one
+ * value represented by this key.
+ */
+ public T get(Object key) {
+ validate();
+ LinkedList<T> entities = items.get(key);
+ if (entities != null && entities.size() > 1) {
+ throw new SplunkException(SplunkException.AMBIGUOUS,
+ "Key has multiple values, specify a namespace");
+ }
+ if (entities == null || entities.size() == 0) return null;
+ return entities.get(0);
+ }
+
+ /**
+ * Gets the value of a scoped, namespace-constrained key, if it exists
+ * within this collection.
+ *
+ * @param key The key to look up.
+ * @param namespace The namespace to constrain the search to.
+ * @return The value indexed by the key, or {@code null} if it doesn't
+ * exist. 
+ */ + public T get(Object key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + validate(); + + LinkedList<T> entities = items.get(key); + if (entities == null || entities.size() == 0) return null; + String pathMatcher = service.fullpath("", namespace); + for (T entity: entities) { + if (entity.path.startsWith(pathMatcher)) { + return entity; + } + } + return null; + } + + @Override public int hashCode() { + return validate().items.hashCode(); + } + + /** {@inheritDoc} */ + public boolean isEmpty() { + return validate().items.isEmpty(); + } + + /** + * Returns the value to use as the key from a given Atom entry. + * Subclasses may override this value for collections that use something + * other than "title" as the key. + * + * @param entry The {@code AtomEntry} corresponding to the collection + * member. + * @return The value to use as the member's key. + */ + protected String itemKey(AtomEntry entry) { + return entry.title; + } + + /** + * Returns the value to use as the member's path from a given Atom entry. + * Subclasses may override this value to support alternative methods of + * determining a member's path. + * + * @param entry The {@code AtomEntry} corresponding to the collection + * member. + * @return The value to use as the member's path. + */ + protected String itemPath(AtomEntry entry) { + return entry.links.get("alternate"); + } + + private Args namespace(AtomEntry entry) { + Args namespace = new Args(); + + // no content? return an empty namespace. 
+ if (entry.content == null) + return namespace; + + HashMap<String, String> entityMetadata = + (HashMap<String, String>)entry.content.get("eai:acl"); + + // If there is no ACL info, we just create an empty map + if (entityMetadata == null) { + entityMetadata = new HashMap<>(); + } + + if (entityMetadata.containsKey("owner")) + namespace.put("owner", entityMetadata.get("owner")); + if (entityMetadata.containsKey("app")) + namespace.put("app", entityMetadata.get("app")); + if (entityMetadata.containsKey("sharing")) + namespace.put("sharing", entityMetadata.get("sharing")); + return namespace; + } + + /** {@inheritDoc} */ + public Set<String> keySet() { + return validate().items.keySet(); + } + + /** + * Issues an HTTP request to list the contents of the collection resource. + * + * @return The list response message. + */ + public ResponseMessage list() { + return service.get(path, this.refreshArgs); + } + + /** + * Loads the collection resource from a given Atom feed. + * + * @param value The {@code AtomFeed} instance to load the collection from. + * @return The current {@code ResourceCollection} instance. + */ + ResourceCollection<T> load(AtomFeed value) { + super.load(value); + for (AtomEntry entry : value.entries) { + String key = itemKey(entry); + T item = createItem(entry); + if (items.containsKey(key)) { + LinkedList<T> list = items.get(key); + list.add(item); + } else { + LinkedList<T> list = new LinkedList<>(); + list.add(item); + items.put(key, list); + } + } + return this; + } + + /** {@inheritDoc} */ + public T put(String key, T value) { + throw new UnsupportedOperationException(); + } + + /** + * Copies all mappings from a given map to this map (unsupported). + * + * @param map The set of mappings to copy into this map. + */ + public void putAll(Map<? extends String, ? 
extends T> map) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + @Override public ResourceCollection refresh() { + items.clear(); + ResponseMessage response = list(); + assert(response.getStatus() == 200); + + AtomFeed feed = null; + try { + feed = AtomFeed.parseStream(response.getContent()); + } catch (Exception e) { + throw new RuntimeException(e); + } + load(feed); + return this; + } + + /** {@inheritDoc} */ + public T remove(Object key) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public int size() { + return validate().items.size(); + } + + /** {@inheritDoc} */ + @Override public ResourceCollection<T> validate() { + super.validate(); + return this; + } + + /** {@inheritDoc} */ + public Collection<T> values() { + LinkedList<T> collection = new LinkedList<>(); + validate(); + Set<String> keySet = items.keySet(); + for (String key: keySet) { + LinkedList<T> list = items.get(key); + for (T item: list) { + collection.add(item); + } + } + return collection; + } + + /** + * Returns the number of values that a specific key represents. + * + * @param key The key to look up. + * @return The number of entity values represented by the key. + */ + public int valueSize(Object key) { + validate(); + LinkedList<T> entities = items.get(key); + if (entities == null || entities.size() == 0) return 0; + return entities.size(); + } +} diff --git a/splunk/src/main/java/com/splunk/ResponseMessage.java b/splunk/src/main/java/com/splunk/ResponseMessage.java index 6eb50c06..24089081 100644 --- a/splunk/src/main/java/com/splunk/ResponseMessage.java +++ b/splunk/src/main/java/com/splunk/ResponseMessage.java @@ -1,80 +1,80 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.InputStream; -import java.util.Map; -import java.util.TreeMap; - -/** - * The {@code ResponseMessage} class represents an HTTP response message that - * includes status codes, response headers, and body content. - */ -public class ResponseMessage { - int status; - Map<String, String> header = null; - InputStream content; - - /** - * Default class constructor. - */ - ResponseMessage() {} - - /** - * Class constructor. - * - * @param status The initial status. - */ - ResponseMessage(int status) { - this.status = status; - } - - ResponseMessage(int status, InputStream content) { - this.status = status; - this.content = content; - } - - /** - * Returns the body content stream. - * - * @return The content stream. - */ - public InputStream getContent() { - return this.content; - } - - /** - * Returns the response headers. - * - * @return Response headers. - */ - public Map<String, String> getHeader() { - if (this.header == null) - this.header = - new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER); - return this.header; - } - - /** - * Returns the response status. - * - * @return The response status. - */ - public int getStatus() { - return this.status; - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.InputStream; +import java.util.Map; +import java.util.TreeMap; + +/** + * The {@code ResponseMessage} class represents an HTTP response message that + * includes status codes, response headers, and body content. + */ +public class ResponseMessage { + int status; + Map<String, String> header = null; + InputStream content; + + /** + * Default class constructor. + */ + ResponseMessage() {} + + /** + * Class constructor. + * + * @param status The initial status. + */ + ResponseMessage(int status) { + this.status = status; + } + + ResponseMessage(int status, InputStream content) { + this.status = status; + this.content = content; + } + + /** + * Returns the body content stream. + * + * @return The content stream. + */ + public InputStream getContent() { + return this.content; + } + + /** + * Returns the response headers. + * + * @return Response headers. + */ + public Map<String, String> getHeader() { + if (this.header == null) + this.header = + new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + return this.header; + } + + /** + * Returns the response status. + * + * @return The response status. 
+ */ + public int getStatus() { + return this.status; + } +} diff --git a/splunk/src/main/java/com/splunk/ResultsReaderJson.java b/splunk/src/main/java/com/splunk/ResultsReaderJson.java index 6544f614..fbbc9b83 100644 --- a/splunk/src/main/java/com/splunk/ResultsReaderJson.java +++ b/splunk/src/main/java/com/splunk/ResultsReaderJson.java @@ -247,7 +247,7 @@ public Collection<String> getFields(){ private Event readEvent() throws IOException { Event returnData = null; String name = null; - List<String> values = new ArrayList<String>(); + List<String> values = new ArrayList<>(); if (jsonReader == null) return null; diff --git a/splunk/src/main/java/com/splunk/ResultsReaderXml.java b/splunk/src/main/java/com/splunk/ResultsReaderXml.java index e242ff8e..a41e83b7 100644 --- a/splunk/src/main/java/com/splunk/ResultsReaderXml.java +++ b/splunk/src/main/java/com/splunk/ResultsReaderXml.java @@ -38,7 +38,7 @@ public class ResultsReaderXml extends ResultsReader { private XMLEventReader xmlReader = null; - private ArrayList<String> fields = new ArrayList<String>(); + private ArrayList<String> fields = new ArrayList<>(); private PushbackInputStream pushbackInputStream; /** @@ -296,7 +296,7 @@ private Event getResultKVPairs() XMLEvent xmlEvent; int eType; String key = null; - List<String> values = new ArrayList<String>(); + List<String> values = new ArrayList<>(); int level = 0; // Event results are flat, so extract k/v pairs based on XML indentation diff --git a/splunk/src/main/java/com/splunk/Service.java b/splunk/src/main/java/com/splunk/Service.java index 57609010..a655c02e 100644 --- a/splunk/src/main/java/com/splunk/Service.java +++ b/splunk/src/main/java/com/splunk/Service.java @@ -144,15 +144,15 @@ public Service(ServiceArgs args) { // NOTE: Must also read the underlying dictionary for forward compatibility. // (Consider the case where the user calls Map.put() directly, // rather than using the new setters.) - this.app = Args.<String>get(args, "app", args.app != null ? 
args.app : null); - this.host = Args.<String>get(args, "host", args.host != null ? args.host : DEFAULT_HOST); - this.owner = Args.<String>get(args, "owner", args.owner != null ? args.owner : null); + this.app = Args.get(args, "app", args.app != null ? args.app : null); + this.host = Args.get(args, "host", args.host != null ? args.host : DEFAULT_HOST); + this.owner = Args.get(args, "owner", args.owner != null ? args.owner : null); this.port = Args.<Integer>get(args, "port", args.port != null ? args.port : DEFAULT_PORT); - this.scheme = Args.<String>get(args, "scheme", args.scheme != null ? args.scheme : DEFAULT_SCHEME); - this.token = Args.<String>get(args, "token", args.token != null ? args.token : null); + this.scheme = Args.get(args, "scheme", args.scheme != null ? args.scheme : DEFAULT_SCHEME); + this.token = Args.get(args, "token", args.token != null ? args.token : null); this.username = (String)args.get("username"); this.password = (String)args.get("password"); - this.httpsHandler = Args.<URLStreamHandler>get(args, "httpsHandler", null); + this.httpsHandler = Args.get(args, "httpsHandler", null); this.setSslSecurityProtocol(Args.get(args, "SSLSecurityProtocol", Service.getSslSecurityProtocol())); this.addCookie((String)args.get("cookie")); this.setCustomHeaders((Map<String, String>) args.get("customHeaders")); @@ -165,15 +165,15 @@ public Service(ServiceArgs args) { */ public Service(Map<String, Object> args) { super(); - this.app = Args.<String>get(args, "app", null); - this.host = Args.<String>get(args, "host", DEFAULT_HOST); - this.owner = Args.<String>get(args, "owner", null); + this.app = Args.get(args, "app", null); + this.host = Args.get(args, "host", DEFAULT_HOST); + this.owner = Args.get(args, "owner", null); this.port = Args.<Integer>get(args, "port", DEFAULT_PORT); - this.scheme = Args.<String>get(args, "scheme", DEFAULT_SCHEME); - this.token = Args.<String>get(args, "token", null); + this.scheme = Args.get(args, "scheme", DEFAULT_SCHEME); + 
this.token = Args.get(args, "token", null); this.username = (String)args.get("username"); this.password = (String)args.get("password"); - this.httpsHandler = Args.<URLStreamHandler>get(args, "httpsHandler", null); + this.httpsHandler = Args.get(args, "httpsHandler", null); this.setSslSecurityProtocol(Args.get(args, "SSLSecurityProtocol", Service.getSslSecurityProtocol())); this.addCookie((String)args.get("cookie")); this.connectTimeout = Args.<Integer>get(args, "connectTimeout", null); @@ -348,7 +348,7 @@ public String getApp() { * @return The application collection. */ public EntityCollection<Application> getApplications() { - return new EntityCollection<Application>( + return new EntityCollection<>( this, "/services/apps/local", Application.class); } @@ -422,7 +422,7 @@ public EntityCollection<DeploymentServer> getDeploymentServers(Args args) { } else { path = ""; // TODO: Find out what this should be and fix it. } - return new EntityCollection<DeploymentServer>( + return new EntityCollection<>( this, "deployment/server", DeploymentServer.class, args); } @@ -450,7 +450,7 @@ public EntityCollection<DeploymentServerClass> getDeploymentServerClasses( } else { path = "deployment/server/serverclasses"; } - return new EntityCollection<DeploymentServerClass>( + return new EntityCollection<>( this, path, DeploymentServerClass.class, args); } @@ -471,7 +471,7 @@ public EntityCollection<DeploymentTenant> getDeploymentTenants() { * @return A collection of multi-tenant configurations. */ public EntityCollection<DeploymentTenant> getDeploymentTenants(Args args) { - return new EntityCollection<DeploymentTenant>( + return new EntityCollection<>( this, "deployment/tenants", DeploymentTenant.class, args); } @@ -507,7 +507,7 @@ public EntityCollection<DistributedPeer> getDistributedPeers() { * @return A collection of search peers. 
*/ public EntityCollection<DistributedPeer> getDistributedPeers(Args args) { - return new EntityCollection<DistributedPeer>( + return new EntityCollection<>( this, "search/distributed/peers", DistributedPeer.class, args); } @@ -601,7 +601,7 @@ public ServiceInfo getInfo() { */ public List<String> getClusterMasters(){ Entity caps = new Entity(this, "cluster/config"); - List<String> hosts = new ArrayList<String>(); + List<String> hosts = new ArrayList<>(); try { String clusterMasterURIs = caps.getString("master_uri"); URL clusterMasterUrl; @@ -703,7 +703,7 @@ public EntityCollection<LicenseGroup> getLicenseGroups() { * @return A collection of license group configurations. */ public EntityCollection<LicenseGroup> getLicenseGroups(Args args) { - return new EntityCollection<LicenseGroup>( + return new EntityCollection<>( this, "licenser/groups", LicenseGroup.class, args); } @@ -724,7 +724,7 @@ public EntityCollection<LicenseMessage> getLicenseMessages() { * @return A collection of licenser messages. */ public EntityCollection<LicenseMessage> getLicenseMessages(Args args) { - return new EntityCollection<LicenseMessage>( + return new EntityCollection<>( this, "licenser/messages", LicenseMessage.class, args); } @@ -776,7 +776,7 @@ public EntityCollection<LicenseSlave> getLicenseSlaves() { * @return A collection of licenser slaves. */ public EntityCollection<LicenseSlave> getLicenseSlaves(Args args) { - return new EntityCollection<LicenseSlave>( + return new EntityCollection<>( this, "licenser/slaves", LicenseSlave.class, args); } @@ -797,7 +797,7 @@ public EntityCollection<LicenseStack> getLicenseStacks() { * @return A collection of license stack configurations. */ public EntityCollection<LicenseStack> getLicenseStacks(Args args) { - return new EntityCollection<LicenseStack>( + return new EntityCollection<>( this, "licenser/stacks", LicenseStack.class, args); } @@ -818,7 +818,7 @@ public EntityCollection<License> getLicenses() { * @return A collection of licenses. 
*/ public EntityCollection<License> getLicenses(Args args) { - return new EntityCollection<License>( + return new EntityCollection<>( this, "licenser/licenses", License.class, args); } @@ -839,7 +839,7 @@ public EntityCollection<Logger> getLoggers() { * @return A collection of logging categories. */ public EntityCollection<Logger> getLoggers(Args args) { - return new EntityCollection<Logger>( + return new EntityCollection<>( this, "server/logger", Logger.class, args); } @@ -880,7 +880,7 @@ public ResourceCollection<ModularInputKind> getModularInputKinds() { * @return A collection of modular inputs. */ public ResourceCollection<ModularInputKind> getModularInputKinds(Args args) { - return new ResourceCollection<ModularInputKind>( + return new ResourceCollection<>( this, "data/modular-inputs", ModularInputKind.class, args); } @@ -910,7 +910,7 @@ public EntityCollection<OutputGroup> getOutputGroups() { * @return A collection of output group configurations. */ public EntityCollection<OutputGroup> getOutputGroups(Args args) { - return new EntityCollection<OutputGroup>( + return new EntityCollection<>( this, "data/outputs/tcp/group", OutputGroup.class, args); } @@ -931,7 +931,7 @@ public EntityCollection<OutputServer> getOutputServers() { * @return A collection of data-forwarding configurations. */ public EntityCollection<OutputServer> getOutputServers(Args args) { - return new EntityCollection<OutputServer>( + return new EntityCollection<>( this, "data/outputs/tcp/server", OutputServer.class, args); } @@ -954,7 +954,7 @@ public EntityCollection<OutputSyslog> getOutputSyslogs() { * @return A collection of syslog forwarders. */ public EntityCollection<OutputSyslog> getOutputSyslogs(Args args) { - return new EntityCollection<OutputSyslog>( + return new EntityCollection<>( this, "data/outputs/tcp/syslog", OutputSyslog.class, args); } @@ -1015,7 +1015,7 @@ public EntityCollection<Role> getRoles() { * @return A collection of user roles. 
*/ public EntityCollection<Role> getRoles(Args args) { - return new EntityCollection<Role>( + return new EntityCollection<>( this, "authorization/roles", Role.class, args); } @@ -1098,7 +1098,7 @@ public EntityCollection<Upload> getUploads() { * @return A collection of in-progress oneshot uploads */ public EntityCollection<Upload> getUploads(Args namespace) { - return new EntityCollection<Upload>( + return new EntityCollection<>( this, "data/inputs/oneshot", Upload.class, namespace); } diff --git a/splunk/src/main/java/com/splunk/Settings.java b/splunk/src/main/java/com/splunk/Settings.java index 3cc28e55..db925a22 100644 --- a/splunk/src/main/java/com/splunk/Settings.java +++ b/splunk/src/main/java/com/splunk/Settings.java @@ -1,289 +1,289 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; - -/** - * The {@code Settings} class represents configuration information for an - * instance of Splunk. - */ -public class Settings extends Entity { - Settings(Service service) { - super(service, "server/settings"); - } - - /** - * Returns the fully-qualified path to the directory containing the - * default index for this instance of Splunk. - * - * @return The path to the Splunk index directory. - */ - public String getSplunkDB() { - return getString("SPLUNK_DB"); - } - - /** - * Returns the fully-qualified path to the Splunk installation directory. 
- * - * @return The path to the Splunk installation directory. - */ - public String getSplunkHome() { - return getString("SPLUNK_HOME"); - } - - /** - * Indicates whether SSL is enabled on the Splunk management port. - * - * @return {@code true} if SSL is enabled, {@code false} if not. - */ - public boolean getEnableSplunkWebSSL() { - return getBoolean("enableSplunkWebSSL"); - } - - /** - * Returns the default host name to use for data inputs. - * - * @return The host name. - */ - public String getHost() { - return getString("host", null); - } - - /** - * Returns the port on which Splunk Web is listening for this - * instance of Splunk. The port number defaults to 8000. - * - * @return The Splunk Web port number. - */ - public int getHttpPort() { - return getInteger("httpport"); - } - - /** - * Returns the IP address:port number for Splunkd. - * - * @return The IP address:port number. - */ - public int getMgmtPort() { - return getInteger("mgmtHostPort"); - } - - /** - * Returns the amount of free disk space that is required for Splunk - * to continue searching and indexing. - * - * @return The required amount of free disk space, in megabytes. - */ - public int getMinFreeSpace() { - return getInteger("minFreeSpace"); - } - - /** - * Returns the string that is prepended to the Splunk symmetric key to - * generate the final key that used to sign all traffic between master and - * slave licensers. - * - * @return Licenser symmetric key. - */ - public String getPass4SymmKey() { - return getString("pass4SymmKey"); - } - - /** - * Returns the name that is used to identify this Splunk instance for - * features such as distributed search. - * - * @return The name used to identify the Splunk instance. - */ - public String getServerName() { - return getString("serverName"); - } - - /** - * Returns the amount of time before a user session times out. - * - * @return The session time-out. 
- */ - public String getSessionTimeout() { - return getString("sessionTimeout"); - } - - /** - * Indicates whether the instance is configured to start Splunk Web. - * - * @return {@code true} if the instance is configured to start Splunk Web, - * {@code false} if Splunk Web is disabled. - */ - public boolean getStartWebServer() { - return getBoolean("startwebserver"); - } - - /** - * Returns the IP address of the authenticating proxy. - * - * @return The IP address of the authenticating proxy. - */ - public String getTrustedIP() { - return getString("trustedIP", null); - } - - /** - * Sets the fully-qualified local path to the default index. - * The default value is {@code $SPLUNK_HOME/var/lib/splunk/defaultdb/db/}. - * - * @param path The local path to the default index. - */ - public void setSplunkDBPath(String path) { - setCacheValue("SPLUNK_DB", path); - } - - /** - * Sets whether Splunk Web uses HTTP or HTTPS. - * - * @param useHttps {@code true} to use SSL and HTTPS, {@code false} to use - * HTTP. - */ - public void setEnableSplunkWebSSL(boolean useHttps) { - setCacheValue("enableSplunkWebSSL", useHttps); - } - - /** - * Sets the default host name to use for data inputs that do not override - * this setting. - * - * @param host The default host name. - */ - public void setHost(String host) { - setCacheValue("host", host); - } - - /** - * Sets the Splunk Web listening port. If Splunk uses SSL and HTTPS, this - * value should be set to the HTTPS port number. - * <p> - * <b>Note:</b> The port must be present for Splunk Web to start. If this - * value is omitted or set to 0, the server will not start an HTTP listener. - * @see #getEnableSplunkWebSSL - * - * @param port The Splunk Web listening port. - */ - public void setHttpPort(int port) { - setCacheValue("httpport", port); - } - - /** - * Sets the management port for splunkd. - * The default value is {@code 8089}. - * - * @param port The port for the management interface. 
- */ - public void setMgmtPort(int port) { - setCacheValue("mgmtHostPort", port); - } - - /** - * Sets the amount of free disk space that must exist for splunkd to - * continue operating. - * <p> - * Before attempting to run a search, Splunk requires this amount of - * free space on the file system where the dispatch directory is stored - * ({@code $SPLUNK_HOME/var/run/splunk/dispatch}). - * - * @param minFreeSpace The minimum free space, in megabytes. - */ - public void setMinimumFreeSpace(int minFreeSpace) { - setCacheValue("minFreeSpace", minFreeSpace); - } - - /** - * Sets the password string that is prepended to the Splunk symmetric key - * to generate the final key, which is used to sign all traffic between - * master/slave licensers. - * - * @param pass4SymmKey The prepended password string. - */ - public void setPasswordSymmKey(String pass4SymmKey) { - setCacheValue("pass4SymmKey", pass4SymmKey); - } - - /** - * Sets the name that is used to identify this Splunk instance for features - * such as distributed search. The default value is - * {@code <hostname>-<user running splunk>}. - * - * @param serverName The server name. - */ - public void setServerName(String serverName) { - setCacheValue("serverName", serverName); - } - - /** - * Sets the session timeout. - * The valid format is <i>number</i> followed by a time unit ("s", "h", - * or "d"). - * - * @param sessionTimeout The session timeout value. - */ - public void setSessionTimeout(String sessionTimeout) { - setCacheValue("sessionTimeout", sessionTimeout); - } - - /** - * Sets whether to start Splunk Web. - * - * @param startwebserver {@code true} to start Splunk Web, {@code false} if - * not. - */ - public void setStartWebServer(boolean startwebserver) { - setCacheValue("startwebserver", startwebserver); - } - - /** - * Sets the IP address of the authenticating proxy. Set this value to a - * valid IP address to enable SSO. - * - * This attribute is disabled by default. The normal value is "127.0.0.1". 
- * - * @param trustedIP The authenticating proxy's IP address. - */ - public void setTrustedIP(String trustedIP) { - setCacheValue("trustedIP", trustedIP); - } - - /** - * {@inheritDoc} - */ - @Override public void update(Map<String, Object> args) { - // Merge cached setters and live args together before updating. - HashMap<String, Object> mergedArgs = new HashMap<String, Object>(); - mergedArgs.putAll(toUpdate); - mergedArgs.putAll(args); - service.post(path + "/settings", mergedArgs); - toUpdate.clear(); - invalidate(); - } - - /** - * {@inheritDoc} - */ - @Override public void update() { - service.post(path + "/settings", toUpdate); - invalidate(); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; + +/** + * The {@code Settings} class represents configuration information for an + * instance of Splunk. + */ +public class Settings extends Entity { + Settings(Service service) { + super(service, "server/settings"); + } + + /** + * Returns the fully-qualified path to the directory containing the + * default index for this instance of Splunk. + * + * @return The path to the Splunk index directory. + */ + public String getSplunkDB() { + return getString("SPLUNK_DB"); + } + + /** + * Returns the fully-qualified path to the Splunk installation directory. + * + * @return The path to the Splunk installation directory. 
+ */ + public String getSplunkHome() { + return getString("SPLUNK_HOME"); + } + + /** + * Indicates whether SSL is enabled on the Splunk management port. + * + * @return {@code true} if SSL is enabled, {@code false} if not. + */ + public boolean getEnableSplunkWebSSL() { + return getBoolean("enableSplunkWebSSL"); + } + + /** + * Returns the default host name to use for data inputs. + * + * @return The host name. + */ + public String getHost() { + return getString("host", null); + } + + /** + * Returns the port on which Splunk Web is listening for this + * instance of Splunk. The port number defaults to 8000. + * + * @return The Splunk Web port number. + */ + public int getHttpPort() { + return getInteger("httpport"); + } + + /** + * Returns the IP address:port number for Splunkd. + * + * @return The IP address:port number. + */ + public int getMgmtPort() { + return getInteger("mgmtHostPort"); + } + + /** + * Returns the amount of free disk space that is required for Splunk + * to continue searching and indexing. + * + * @return The required amount of free disk space, in megabytes. + */ + public int getMinFreeSpace() { + return getInteger("minFreeSpace"); + } + + /** + * Returns the string that is prepended to the Splunk symmetric key to + * generate the final key that used to sign all traffic between master and + * slave licensers. + * + * @return Licenser symmetric key. + */ + public String getPass4SymmKey() { + return getString("pass4SymmKey"); + } + + /** + * Returns the name that is used to identify this Splunk instance for + * features such as distributed search. + * + * @return The name used to identify the Splunk instance. + */ + public String getServerName() { + return getString("serverName"); + } + + /** + * Returns the amount of time before a user session times out. + * + * @return The session time-out. 
+ */ + public String getSessionTimeout() { + return getString("sessionTimeout"); + } + + /** + * Indicates whether the instance is configured to start Splunk Web. + * + * @return {@code true} if the instance is configured to start Splunk Web, + * {@code false} if Splunk Web is disabled. + */ + public boolean getStartWebServer() { + return getBoolean("startwebserver"); + } + + /** + * Returns the IP address of the authenticating proxy. + * + * @return The IP address of the authenticating proxy. + */ + public String getTrustedIP() { + return getString("trustedIP", null); + } + + /** + * Sets the fully-qualified local path to the default index. + * The default value is {@code $SPLUNK_HOME/var/lib/splunk/defaultdb/db/}. + * + * @param path The local path to the default index. + */ + public void setSplunkDBPath(String path) { + setCacheValue("SPLUNK_DB", path); + } + + /** + * Sets whether Splunk Web uses HTTP or HTTPS. + * + * @param useHttps {@code true} to use SSL and HTTPS, {@code false} to use + * HTTP. + */ + public void setEnableSplunkWebSSL(boolean useHttps) { + setCacheValue("enableSplunkWebSSL", useHttps); + } + + /** + * Sets the default host name to use for data inputs that do not override + * this setting. + * + * @param host The default host name. + */ + public void setHost(String host) { + setCacheValue("host", host); + } + + /** + * Sets the Splunk Web listening port. If Splunk uses SSL and HTTPS, this + * value should be set to the HTTPS port number. + * <p> + * <b>Note:</b> The port must be present for Splunk Web to start. If this + * value is omitted or set to 0, the server will not start an HTTP listener. + * @see #getEnableSplunkWebSSL + * + * @param port The Splunk Web listening port. + */ + public void setHttpPort(int port) { + setCacheValue("httpport", port); + } + + /** + * Sets the management port for splunkd. + * The default value is {@code 8089}. + * + * @param port The port for the management interface. 
+ */ + public void setMgmtPort(int port) { + setCacheValue("mgmtHostPort", port); + } + + /** + * Sets the amount of free disk space that must exist for splunkd to + * continue operating. + * <p> + * Before attempting to run a search, Splunk requires this amount of + * free space on the file system where the dispatch directory is stored + * ({@code $SPLUNK_HOME/var/run/splunk/dispatch}). + * + * @param minFreeSpace The minimum free space, in megabytes. + */ + public void setMinimumFreeSpace(int minFreeSpace) { + setCacheValue("minFreeSpace", minFreeSpace); + } + + /** + * Sets the password string that is prepended to the Splunk symmetric key + * to generate the final key, which is used to sign all traffic between + * master/slave licensers. + * + * @param pass4SymmKey The prepended password string. + */ + public void setPasswordSymmKey(String pass4SymmKey) { + setCacheValue("pass4SymmKey", pass4SymmKey); + } + + /** + * Sets the name that is used to identify this Splunk instance for features + * such as distributed search. The default value is + * {@code <hostname>-<user running splunk>}. + * + * @param serverName The server name. + */ + public void setServerName(String serverName) { + setCacheValue("serverName", serverName); + } + + /** + * Sets the session timeout. + * The valid format is <i>number</i> followed by a time unit ("s", "h", + * or "d"). + * + * @param sessionTimeout The session timeout value. + */ + public void setSessionTimeout(String sessionTimeout) { + setCacheValue("sessionTimeout", sessionTimeout); + } + + /** + * Sets whether to start Splunk Web. + * + * @param startwebserver {@code true} to start Splunk Web, {@code false} if + * not. + */ + public void setStartWebServer(boolean startwebserver) { + setCacheValue("startwebserver", startwebserver); + } + + /** + * Sets the IP address of the authenticating proxy. Set this value to a + * valid IP address to enable SSO. + * + * This attribute is disabled by default. The normal value is "127.0.0.1". 
+ * + * @param trustedIP The authenticating proxy's IP address. + */ + public void setTrustedIP(String trustedIP) { + setCacheValue("trustedIP", trustedIP); + } + + /** + * {@inheritDoc} + */ + @Override public void update(Map<String, Object> args) { + // Merge cached setters and live args together before updating. + HashMap<String, Object> mergedArgs = new HashMap<>(); + mergedArgs.putAll(toUpdate); + mergedArgs.putAll(args); + service.post(path + "/settings", mergedArgs); + toUpdate.clear(); + invalidate(); + } + + /** + * {@inheritDoc} + */ + @Override public void update() { + service.post(path + "/settings", toUpdate); + invalidate(); + } +} diff --git a/splunk/src/main/java/com/splunk/SimpleCookieStore.java b/splunk/src/main/java/com/splunk/SimpleCookieStore.java index 4fd60664..0dc12d2f 100644 --- a/splunk/src/main/java/com/splunk/SimpleCookieStore.java +++ b/splunk/src/main/java/com/splunk/SimpleCookieStore.java @@ -1,93 +1,93 @@ - -/* - * Copyright 2015 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.List; -import java.net.HttpCookie; -import java.util.Map; -import java.util.HashMap; -import java.lang.StringBuilder; - -/** - * The {@code SimpleCookieStore} class stores cookies for authentication. 
- */ -class SimpleCookieStore { - - public static final String SPLUNK_AUTH_COOKIE = "splunkd_"; - - private Map<String, String> cookieJar = new HashMap<String, String>(); - /** - * Adds cookies from a "Set-Cookie" header to the cookie store. - * - * @param setCookieHeader The result from a getRequestHeader("Set-Cookie") call - */ - public void add(String setCookieHeader) { - if (setCookieHeader != null) { - List<HttpCookie> cookies = HttpCookie.parse(setCookieHeader); - for (HttpCookie cookie : cookies) { - cookieJar.put(cookie.getName(), cookie.getValue()); - } - } - } - - /** - * Returns a string to be set as a "Cookie" header - * - * @return Cookie String in the format "Key=Value; Key=Value; etc" - */ - public String getCookies() { - StringBuilder cookieStringBuilder = new StringBuilder(); - - for (Map.Entry<String, String> cookie : cookieJar.entrySet()) { - cookieStringBuilder.append(cookie.getKey()); - cookieStringBuilder.append("="); - cookieStringBuilder.append(cookie.getValue()); - cookieStringBuilder.append("; "); - } - return cookieStringBuilder.toString(); - } - - /** - * Returns true if the cookie store is empty, false otherwise - * - * @return Boolean for whether or not the cookie store is empty - */ - public Boolean isEmpty() { - return cookieJar.isEmpty(); - } - - public boolean hasSplunkAuthCookie(){ - if(cookieJar.isEmpty()){ - return false; - } - for(String cookie : cookieJar.keySet()){ - if(cookie.startsWith(SPLUNK_AUTH_COOKIE)){ - return true; - } - } - return false; - } - - /** - * Removes all cookies from SimpleCookieStore - */ - public void removeAll() { - cookieJar.clear(); - } - -} + +/* + * Copyright 2015 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.List; +import java.net.HttpCookie; +import java.util.Map; +import java.util.HashMap; +import java.lang.StringBuilder; + +/** + * The {@code SimpleCookieStore} class stores cookies for authentication. + */ +class SimpleCookieStore { + + public static final String SPLUNK_AUTH_COOKIE = "splunkd_"; + + private Map<String, String> cookieJar = new HashMap<>(); + /** + * Adds cookies from a "Set-Cookie" header to the cookie store. + * + * @param setCookieHeader The result from a getRequestHeader("Set-Cookie") call + */ + public void add(String setCookieHeader) { + if (setCookieHeader != null) { + List<HttpCookie> cookies = HttpCookie.parse(setCookieHeader); + for (HttpCookie cookie : cookies) { + cookieJar.put(cookie.getName(), cookie.getValue()); + } + } + } + + /** + * Returns a string to be set as a "Cookie" header + * + * @return Cookie String in the format "Key=Value; Key=Value; etc" + */ + public String getCookies() { + StringBuilder cookieStringBuilder = new StringBuilder(); + + for (Map.Entry<String, String> cookie : cookieJar.entrySet()) { + cookieStringBuilder.append(cookie.getKey()); + cookieStringBuilder.append("="); + cookieStringBuilder.append(cookie.getValue()); + cookieStringBuilder.append("; "); + } + return cookieStringBuilder.toString(); + } + + /** + * Returns true if the cookie store is empty, false otherwise + * + * @return Boolean for whether or not the cookie store is empty + */ + public Boolean isEmpty() { + return cookieJar.isEmpty(); + } + + public boolean hasSplunkAuthCookie(){ + 
if(cookieJar.isEmpty()){ + return false; + } + for(String cookie : cookieJar.keySet()){ + if(cookie.startsWith(SPLUNK_AUTH_COOKIE)){ + return true; + } + } + return false; + } + + /** + * Removes all cookies from SimpleCookieStore + */ + public void removeAll() { + cookieJar.clear(); + } + +} diff --git a/splunk/src/main/java/com/splunk/TcpInput.java b/splunk/src/main/java/com/splunk/TcpInput.java index 29faa6b5..a7905e0a 100644 --- a/splunk/src/main/java/com/splunk/TcpInput.java +++ b/splunk/src/main/java/com/splunk/TcpInput.java @@ -1,326 +1,312 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.Socket; - -/** - * The {@code TcpInput} class represents a raw TCP data input. This differs from - * a <i>cooked</i> TCP input in that this TCP input is in raw form, and is not - * processed (or "cooked"). - */ -public class TcpInput extends PortInput { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The raw TCP input endpoint. - */ - TcpInput(Service service, String path) { - super(service, path); - } - - /** - * Returns a socket attached to this raw TCP input. 
- * @return Socket instance - * @throws IOException The IOException instance - */ - public Socket attach() throws IOException { - return new Socket(this.service.getHost(), this.getPort()); - } - - /** - * Submits events to this raw TCP input, reusing the connection. - * - * This method passes an output stream connected to the index to the - * {@code run} method of the {@code ReceiverBehavior} object, then handles - * setting up and tearing down the socket. - * For an example of how to use this method, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to - * get data into Splunk</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * @param behavior The ReceiverBehavior instance - * @throws IOException The IOException instance - */ - public void attachWith(ReceiverBehavior behavior) throws IOException { - Socket socket = null; - OutputStream output = null; - try { - socket = attach(); - output = socket.getOutputStream(); - behavior.run(output); - output.flush(); - } finally { - if (output != null) { output.close(); } - if (socket != null) { socket.close(); } - } - } - - /** - * Returns an object that contains the inbound raw TCP connections. - * - * @return The TCP connections object. - */ - public TcpConnections connections() { - return new TcpConnections(service, path + "/connections"); - } - - /** - * Returns the style of host connection. Valid values are: "ip", "dns", and - * "none". - * - * @return The style of host connection, or {@code null} if not specified. - */ - public String getConnectionHost() { - return getString("connection_host", null); - } - - /** - * Returns the group of this raw TCP input. - * - * @return The group. - */ - public String getGroup() { - return getString("group", null); - } - - /** - * Returns the source host of this raw TCP input where this indexer gets its - * data. - * - * @return The source host, or {@code null} if not specified. 
- */ - public String getHost() { - return getString("host", null); - } - - /** - * Returns the index name of this raw TCP input. - * - * @return The index name, or {@code null} if not specified. - */ - public String getIndex() { - return getString("index", null); - } - - /** - * Returns the input kind of this input. - * - * @return The input kind. - */ - public InputKind getKind() { - return InputKind.Tcp; - } - - /** - * Returns the queue for this raw TCP input. Valid values are: - * "parsingQueue" and "indexQueue". - * - * @return The queue, or {@code null} if not specified. - */ - public String getQueue() { - return getString("queue", null); - } - - /** - * @deprecated Returns the value of the {@code _rcvbuf} attribute for this - * TCP input. - * - * @return The {@code _rcvbuf} value. - */ - public int getRcvBuf() { - return getInteger("_rcvbuf"); - } - - /** - * Returns the incoming host restriction for this raw TCP input. When - * specified, this input only accepts data from the specified host. - * - * @return The incoming host restriction, or {@code null} if not specified. - */ - public String getRestrictToHost() { - return getString("restrictToHost", null); - } - - /** - * Returns the initial source key for this raw TCP input. Typically this - * value is the input file path. - * - * @return The source key, or {@code null} if not specified. - */ - public String getSource() { - return getString("source", null); - } - - /** - * Returns the source type for events from this raw TCP input. - * - * @return The source type, or {@code null} if not specified. - */ - public String getSourceType() { - return getString("sourcetype", null); - } - - /** - * Indicates whether this raw TCP input is using secure socket layer (SSL). - * - * @return {@code true} if this input is using SSL, {@code false} if not. - */ - public boolean getSSL() { - return getBoolean("SSL", false); - } - - /** - * Sets whether to use secure socket layer (SSL). 
- * - * @param SSL {@code true} to use SSL, {@code false} if not. - */ - public void setSSL(boolean SSL) { - setCacheValue("SSL", SSL); - } - - /** - * Sets the value for the <b>from-host</b> field for the remote server that - * is sending data. Valid values are: <ul> - * <li>"ip": Sets the host to the IP address of the remote server sending - * data.</li> - * <li>"dns": Sets the host to the reverse DNS entry for the IP address of - * the remote server sending data.</li> - * <li>"none": Leaves the host as specified in inputs.conf, which is - * typically the Splunk system host name.</li></ul> - * - * @param connection_host The connection host information. - */ - public void setConnectionHost(String connection_host) { - setCacheValue("connection_host", connection_host); - } - - /** - * Sets whether this input is enabled or disabled. - * <p> - * <b>Note:</b> Using this method requires you to restart Splunk before this - * setting takes effect. To avoid restarting Splunk, use the - * {@code Entity.disable} and {@code Entity.enable} methods instead, which - * take effect immediately. - * - * @param disabled {@code true} to disable this input, {@code false} to - * enable it. - */ - public void setDisabled(boolean disabled) { - setCacheValue("disabled", disabled); - } - - /** - * Sets the host from which the indexer gets data. - * - * @param host The host name. - */ - public void setHost(String host) { - setCacheValue("host", host); - } - - /** - * Sets the index in which to store all generated events. - * - * @param index The index name. - */ - public void setIndex(String index) { - setCacheValue("index", index); - } - - /** - * Submit a single event to this raw TCP input by opening the connection, - * submitting the event, and closing the connection. To submit multiple - * events, use {@code attachWith} to open a single connection. - * @see #attachWith - * - * @param eventBody A string that contains the event. 
- * @throws IOException The IOException instance - */ - public void submit(String eventBody) throws IOException { - Socket socket = null; - OutputStream output = null; - try { - socket = attach(); - output = socket.getOutputStream(); - output.write(eventBody.getBytes("UTF-8")); - output.flush(); - output.close(); - socket.close(); - } finally { - if (output != null) { output.close(); } - if (socket != null) { socket.close(); } - } - } - - /** - * Sets how the input processor should deposit the events it reads. Valid - * values are:<ul> - * <li>"parsingQueue": Applies props.conf and other parsing rules to your - * data.</li> - * <li>"indexQueue": Sends your data directly into the index.</li></ul> - * - * @param queue The queue-processing type. - */ - public void setQueue(String queue) { - setCacheValue("queue", queue); - } - - /** - * Sets the timeout value for adding a Done key. - * - * If a connection over the input port specified by {@code name} remains - * idle after receiving data for this specified number of seconds, it adds - * a Done key, implying that the last event has been completely received. - * - * @param rawTcpDoneTimeout The timeout value, in seconds. - */ - public void setRawTcpDoneTimeout(int rawTcpDoneTimeout) { - setCacheValue("rawTcpDoneTimeout", rawTcpDoneTimeout); - } - - /** - * Sets the initial value for the source key for events from this - * input. The source key is used during parsing and indexing. The - * <b>source</b> field is used for searches. As a convenience, the source - * string is prepended with "source::". - * <p> - * <b>Note:</b> Overriding the source key is generally not recommended. - * Typically, the input layer provides a more accurate string to aid in - * problem analysis and investigation, accurately recording the file from - * which the data was retrieved. Consider the use of source types, tagging, - * and search wildcards before overriding this value. - * - * @param source The source. 
- */ - public void setSource(String source) { - setCacheValue("source", source); - } - - /** - * Sets the source type for events from this raw TCP input. - * - * @param sourcetype The source type. - */ - public void setSourceType(String sourcetype) { - setCacheValue("sourcetype", sourcetype); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.Socket; + +/** + * The {@code TcpInput} class represents a raw TCP data input. This differs from + * a <i>cooked</i> TCP input in that this TCP input is in raw form, and is not + * processed (or "cooked"). + */ +public class TcpInput extends PortInput { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The raw TCP input endpoint. + */ + TcpInput(Service service, String path) { + super(service, path); + } + + /** + * Returns a socket attached to this raw TCP input. + * @return Socket instance + * @throws IOException The IOException instance + */ + public Socket attach() throws IOException { + return new Socket(this.service.getHost(), this.getPort()); + } + + /** + * Submits events to this raw TCP input, reusing the connection. + * + * This method passes an output stream connected to the index to the + * {@code run} method of the {@code ReceiverBehavior} object, then handles + * setting up and tearing down the socket. 
+ * For an example of how to use this method, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to + * get data into Splunk</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * @param behavior The ReceiverBehavior instance + * @throws IOException The IOException instance + */ + public void attachWith(ReceiverBehavior behavior) throws IOException { + try (Socket socket = attach(); + OutputStream output = socket.getOutputStream();) { + behavior.run(output); + output.flush(); + } + } + + /** + * Returns an object that contains the inbound raw TCP connections. + * + * @return The TCP connections object. + */ + public TcpConnections connections() { + return new TcpConnections(service, path + "/connections"); + } + + /** + * Returns the style of host connection. Valid values are: "ip", "dns", and + * "none". + * + * @return The style of host connection, or {@code null} if not specified. + */ + public String getConnectionHost() { + return getString("connection_host", null); + } + + /** + * Returns the group of this raw TCP input. + * + * @return The group. + */ + public String getGroup() { + return getString("group", null); + } + + /** + * Returns the source host of this raw TCP input where this indexer gets its + * data. + * + * @return The source host, or {@code null} if not specified. + */ + public String getHost() { + return getString("host", null); + } + + /** + * Returns the index name of this raw TCP input. + * + * @return The index name, or {@code null} if not specified. + */ + public String getIndex() { + return getString("index", null); + } + + /** + * Returns the input kind of this input. + * + * @return The input kind. + */ + public InputKind getKind() { + return InputKind.Tcp; + } + + /** + * Returns the queue for this raw TCP input. Valid values are: + * "parsingQueue" and "indexQueue". + * + * @return The queue, or {@code null} if not specified. 
+ */ + public String getQueue() { + return getString("queue", null); + } + + /** + * @deprecated Returns the value of the {@code _rcvbuf} attribute for this + * TCP input. + * + * @return The {@code _rcvbuf} value. + */ + public int getRcvBuf() { + return getInteger("_rcvbuf"); + } + + /** + * Returns the incoming host restriction for this raw TCP input. When + * specified, this input only accepts data from the specified host. + * + * @return The incoming host restriction, or {@code null} if not specified. + */ + public String getRestrictToHost() { + return getString("restrictToHost", null); + } + + /** + * Returns the initial source key for this raw TCP input. Typically this + * value is the input file path. + * + * @return The source key, or {@code null} if not specified. + */ + public String getSource() { + return getString("source", null); + } + + /** + * Returns the source type for events from this raw TCP input. + * + * @return The source type, or {@code null} if not specified. + */ + public String getSourceType() { + return getString("sourcetype", null); + } + + /** + * Indicates whether this raw TCP input is using secure socket layer (SSL). + * + * @return {@code true} if this input is using SSL, {@code false} if not. + */ + public boolean getSSL() { + return getBoolean("SSL", false); + } + + /** + * Sets whether to use secure socket layer (SSL). + * + * @param SSL {@code true} to use SSL, {@code false} if not. + */ + public void setSSL(boolean SSL) { + setCacheValue("SSL", SSL); + } + + /** + * Sets the value for the <b>from-host</b> field for the remote server that + * is sending data. 
Valid values are: <ul> + * <li>"ip": Sets the host to the IP address of the remote server sending + * data.</li> + * <li>"dns": Sets the host to the reverse DNS entry for the IP address of + * the remote server sending data.</li> + * <li>"none": Leaves the host as specified in inputs.conf, which is + * typically the Splunk system host name.</li></ul> + * + * @param connection_host The connection host information. + */ + public void setConnectionHost(String connection_host) { + setCacheValue("connection_host", connection_host); + } + + /** + * Sets whether this input is enabled or disabled. + * <p> + * <b>Note:</b> Using this method requires you to restart Splunk before this + * setting takes effect. To avoid restarting Splunk, use the + * {@code Entity.disable} and {@code Entity.enable} methods instead, which + * take effect immediately. + * + * @param disabled {@code true} to disable this input, {@code false} to + * enable it. + */ + public void setDisabled(boolean disabled) { + setCacheValue("disabled", disabled); + } + + /** + * Sets the host from which the indexer gets data. + * + * @param host The host name. + */ + public void setHost(String host) { + setCacheValue("host", host); + } + + /** + * Sets the index in which to store all generated events. + * + * @param index The index name. + */ + public void setIndex(String index) { + setCacheValue("index", index); + } + + /** + * Submit a single event to this raw TCP input by opening the connection, + * submitting the event, and closing the connection. To submit multiple + * events, use {@code attachWith} to open a single connection. + * @see #attachWith + * + * @param eventBody A string that contains the event. 
+ * @throws IOException The IOException instance + */ + public void submit(String eventBody) throws IOException { + try (Socket socket = attach(); + OutputStream output = socket.getOutputStream();) { + output.write(eventBody.getBytes("UTF-8")); + output.flush(); + } + } + + /** + * Sets how the input processor should deposit the events it reads. Valid + * values are:<ul> + * <li>"parsingQueue": Applies props.conf and other parsing rules to your + * data.</li> + * <li>"indexQueue": Sends your data directly into the index.</li></ul> + * + * @param queue The queue-processing type. + */ + public void setQueue(String queue) { + setCacheValue("queue", queue); + } + + /** + * Sets the timeout value for adding a Done key. + * + * If a connection over the input port specified by {@code name} remains + * idle after receiving data for this specified number of seconds, it adds + * a Done key, implying that the last event has been completely received. + * + * @param rawTcpDoneTimeout The timeout value, in seconds. + */ + public void setRawTcpDoneTimeout(int rawTcpDoneTimeout) { + setCacheValue("rawTcpDoneTimeout", rawTcpDoneTimeout); + } + + /** + * Sets the initial value for the source key for events from this + * input. The source key is used during parsing and indexing. The + * <b>source</b> field is used for searches. As a convenience, the source + * string is prepended with "source::". + * <p> + * <b>Note:</b> Overriding the source key is generally not recommended. + * Typically, the input layer provides a more accurate string to aid in + * problem analysis and investigation, accurately recording the file from + * which the data was retrieved. Consider the use of source types, tagging, + * and search wildcards before overriding this value. + * + * @param source The source. + */ + public void setSource(String source) { + setCacheValue("source", source); + } + + /** + * Sets the source type for events from this raw TCP input. + * + * @param sourcetype The source type. 
+ */ + public void setSourceType(String sourcetype) { + setCacheValue("sourcetype", sourcetype); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java b/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java old mode 100755 new mode 100644 index f7431e78..abfc256f --- a/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java +++ b/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java @@ -1,225 +1,224 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Document; -import org.w3c.dom.Node; -import org.xml.sax.SAXException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * The {@code InputDefinition} class encodes the XML defining inputs that Splunk passes to - * a modular input script as a Java object. - */ -public class InputDefinition { - // We use a map to hold all parameters such as server host, server URI, etc. instead of individual fields - // so that additions to the input definition contract in the future won't break this implementation. It also - // simplifies the parsing code below. 
- private Map<String,String> metadata; - - private Map<String, Map<String, Parameter>> inputs; - - private final String serverHostField = "server_host"; - private final String serverUriField = "server_uri"; - private final String checkpointDirField = "checkpoint_dir"; - private final String sessionKeyField = "session_key"; - - // Package private on purpose - InputDefinition() { - inputs = new HashMap<String, Map<String, Parameter>>(); - metadata = new HashMap<String, String>(); - } - - /** - * Gets the name of the field to fetch. - * - * In future versions of Splunk, there may be additional fields on the {@code InputDefinition}. {@code getField} permits - * access to them in case you are constrained to an old version of the Splunk SDK for Java. - * - * @param fieldName The name of the field to fetch. - * @return The field. - */ - public String getField(String fieldName) { - return this.metadata.get(fieldName); - } - - /** - * Sets the name of the server on which this modular input is being run. - * @param serverHost String value - */ - public void setServerHost(String serverHost) { - this.metadata.put(serverHostField, serverHost); - } - - /** - * Gets the name of the server on which this modular input is being run. - * - * @return The name of the server on which this modular input is being run. - */ - public String getServerHost() { - return this.metadata.get(serverHostField); - } - - /** - * Sets the URI to reach the server on which this modular input is being run. - * - * @param serverUri The URI to reach the server on which this modular input is being run. - */ - public void setServerUri(String serverUri) { - this.metadata.put(serverUriField, serverUri); - } - - /** - * Gets the URI to the server on which this modular input is being run. - * - * @return The URI to the server on which this modular input is being run. - */ - public String getServerUri() { - return this.metadata.get(serverUriField); - } - - /** - * Sets the path to which to write checkpoint files. 
- * - * @param checkpointDir The path to which to write checkpoint files. - */ - public void setCheckpointDir(String checkpointDir) { - this.metadata.put(checkpointDirField, checkpointDir); - } - - /** - * Gets the path to which to write checkpoint files for restarting inputs. - * - * @return The path to which to write checkpoint files for restarting inputs. - */ - public String getCheckpointDir() { - return this.metadata.get(checkpointDirField); - } - - /** - * Sets a session key that can be used to access splunkd's REST API. - * - * @param sessionKey A session key that can be used to access splunkd's REST API. - */ - public void setSessionKey(String sessionKey) { - this.metadata.put(sessionKeyField, sessionKey); - } - - /** - * Sets a session providing access to splunkd's REST API on this host. - * - * @return A session key providing access to splunkd's REST API on this host. - */ - public String getSessionKey() { - return this.metadata.get(sessionKeyField); - } - - /** - * Adds an input to the set of inputs on this {@code InputDefinition}. - * - * @param name The name of this input (e.g., foobar://this-input-name). - * @param parameters A collection of {@code Parameter} objects giving the settings for this input. - */ - public void addInput(String name, Collection<Parameter> parameters) { - Map<String, Parameter> paramMap = new HashMap<String, Parameter>(); - - for (Parameter p : parameters) { - paramMap.put(p.getName(), p); - } - - this.inputs.put(name, paramMap); - } - - /** - * @return A map of all the inputs specified in this {@code InputDefinition}. - */ - public Map<String, Map<String, Parameter>> getInputs() { - return this.inputs; - } - - /** - * Parses a stream containing XML into an InputDefinition. - * - * @param stream The stream containing XML to parse. - * @return An {@code InputDefinition} object. - * @throws ParserConfigurationException If there are errors in setting up the parser (which indicates system - * configuration issues). 
- * @throws IOException If there is an error in reading from the stream. - * @throws SAXException When the XML is invalid. - * @throws MalformedDataException When the XML does specify a valid set of inputs. - */ - public static InputDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, - IOException, SAXException, MalformedDataException { - DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - documentBuilderFactory.setIgnoringElementContentWhitespace(true); - documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); - documentBuilderFactory.setExpandEntityReferences(false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); - DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); - Document doc = documentBuilder.parse(stream); - - InputDefinition definition = new InputDefinition(); - for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { - if (node.getNodeType() == Node.TEXT_NODE) { - continue; - } else if (node.getNodeName().equals("configuration")) { - for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { - if (child.getNodeType() == Node.TEXT_NODE) { - continue; - } - if (!child.getNodeName().equals("stanza")) { - throw new MalformedDataException("Expected stanza element; found " + child.getNodeName()); - } - String name = child.getAttributes().getNamedItem("name").getNodeValue(); - List<Parameter> parameter = Parameter.nodeToParameterList(child); - definition.addInput(name, parameter); - } - } else { - definition.metadata.put( - node.getNodeName(), - XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) - ); - } - } - - return definition; - } - - @Override - public boolean 
equals(Object other) { - if (!(other instanceof InputDefinition)) { - return false; - } - InputDefinition that = (InputDefinition)other; - return this.metadata.equals(that.metadata) && this.inputs.equals(that.inputs); - } - - @Override - public int hashCode() { - return this.metadata.hashCode() ^ (this.getInputs() == null ? 0 : this.getInputs().hashCode()); - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * The {@code InputDefinition} class encodes the XML defining inputs that Splunk passes to + * a modular input script as a Java object. + */ +public class InputDefinition { + // We use a map to hold all parameters such as server host, server URI, etc. instead of individual fields + // so that additions to the input definition contract in the future won't break this implementation. It also + // simplifies the parsing code below. 
+ private Map<String,String> metadata; + + private Map<String, Map<String, Parameter>> inputs; + + private final String serverHostField = "server_host"; + private final String serverUriField = "server_uri"; + private final String checkpointDirField = "checkpoint_dir"; + private final String sessionKeyField = "session_key"; + + // Package private on purpose + InputDefinition() { + inputs = new HashMap<>(); + metadata = new HashMap<>(); + } + + /** + * Gets the name of the field to fetch. + * + * In future versions of Splunk, there may be additional fields on the {@code InputDefinition}. {@code getField} permits + * access to them in case you are constrained to an old version of the Splunk SDK for Java. + * + * @param fieldName The name of the field to fetch. + * @return The field. + */ + public String getField(String fieldName) { + return this.metadata.get(fieldName); + } + + /** + * Sets the name of the server on which this modular input is being run. + * @param serverHost String value + */ + public void setServerHost(String serverHost) { + this.metadata.put(serverHostField, serverHost); + } + + /** + * Gets the name of the server on which this modular input is being run. + * + * @return The name of the server on which this modular input is being run. + */ + public String getServerHost() { + return this.metadata.get(serverHostField); + } + + /** + * Sets the URI to reach the server on which this modular input is being run. + * + * @param serverUri The URI to reach the server on which this modular input is being run. + */ + public void setServerUri(String serverUri) { + this.metadata.put(serverUriField, serverUri); + } + + /** + * Gets the URI to the server on which this modular input is being run. + * + * @return The URI to the server on which this modular input is being run. + */ + public String getServerUri() { + return this.metadata.get(serverUriField); + } + + /** + * Sets the path to which to write checkpoint files. 
+ * + * @param checkpointDir The path to which to write checkpoint files. + */ + public void setCheckpointDir(String checkpointDir) { + this.metadata.put(checkpointDirField, checkpointDir); + } + + /** + * Gets the path to which to write checkpoint files for restarting inputs. + * + * @return The path to which to write checkpoint files for restarting inputs. + */ + public String getCheckpointDir() { + return this.metadata.get(checkpointDirField); + } + + /** + * Sets a session key that can be used to access splunkd's REST API. + * + * @param sessionKey A session key that can be used to access splunkd's REST API. + */ + public void setSessionKey(String sessionKey) { + this.metadata.put(sessionKeyField, sessionKey); + } + + /** + * Sets a session providing access to splunkd's REST API on this host. + * + * @return A session key providing access to splunkd's REST API on this host. + */ + public String getSessionKey() { + return this.metadata.get(sessionKeyField); + } + + /** + * Adds an input to the set of inputs on this {@code InputDefinition}. + * + * @param name The name of this input (e.g., foobar://this-input-name). + * @param parameters A collection of {@code Parameter} objects giving the settings for this input. + */ + public void addInput(String name, Collection<Parameter> parameters) { + Map<String, Parameter> paramMap = new HashMap<>(); + + for (Parameter p : parameters) { + paramMap.put(p.getName(), p); + } + + this.inputs.put(name, paramMap); + } + + /** + * @return A map of all the inputs specified in this {@code InputDefinition}. + */ + public Map<String, Map<String, Parameter>> getInputs() { + return this.inputs; + } + + /** + * Parses a stream containing XML into an InputDefinition. + * + * @param stream The stream containing XML to parse. + * @return An {@code InputDefinition} object. + * @throws ParserConfigurationException If there are errors in setting up the parser (which indicates system + * configuration issues). 
+ * @throws IOException If there is an error in reading from the stream. + * @throws SAXException When the XML is invalid. + * @throws MalformedDataException When the XML does specify a valid set of inputs. + */ + public static InputDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, + IOException, SAXException, MalformedDataException { + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setIgnoringElementContentWhitespace(true); + documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + documentBuilderFactory.setExpandEntityReferences(false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + Document doc = documentBuilder.parse(stream); + + InputDefinition definition = new InputDefinition(); + for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { + if (node.getNodeType() == Node.TEXT_NODE) { + continue; + } else if (node.getNodeName().equals("configuration")) { + for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { + if (child.getNodeType() == Node.TEXT_NODE) { + continue; + } + if (!child.getNodeName().equals("stanza")) { + throw new MalformedDataException("Expected stanza element; found " + child.getNodeName()); + } + String name = child.getAttributes().getNamedItem("name").getNodeValue(); + List<Parameter> parameter = Parameter.nodeToParameterList(child); + definition.addInput(name, parameter); + } + } else { + definition.metadata.put( + node.getNodeName(), + XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) + ); + } + } + + return definition; + } + + @Override + public boolean 
equals(Object other) { + if (!(other instanceof InputDefinition that)) { + return false; + } + return this.metadata.equals(that.metadata) && this.inputs.equals(that.inputs); + } + + @Override + public int hashCode() { + return this.metadata.hashCode() ^ (this.getInputs() == null ? 0 : this.getInputs().hashCode()); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java b/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java old mode 100755 new mode 100644 index 39c3ce57..ceb7dd2f --- a/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java +++ b/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java @@ -1,83 +1,82 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import java.util.ArrayList; -import java.util.List; - -/** - * The {@code MultiValueParameter} class represents a parameter containing multiple values that is passed as part of a definition - * of a modular input instance. 
{@code MultiValueParameter} objects correspond to XML fragments of the form: - * - * <pre> - * {@code - * <param_list name="multiValue"> - * <value>value1</value> - * <value>value2</value> - * </param_list> - * } - * </pre> - */ -public class MultiValueParameter extends Parameter { - private final String name; - private final List<String> values; - - // Note: package private constructor by design so parameters cannot be instantiated by the user. - MultiValueParameter(String name) { - this.name = name; - this.values = new ArrayList<String>(); - } - - /** - * Gets the name of this parameter. - * - * @return The name of this parameter. - */ - public String getName() { - return this.name; - } - - /** - * Gets a list of all values of this parameter. - * - * @return A list of all values of this parameter. - */ - public List<String> getValues() { - return this.values; - } - - // Package private by design. - void appendValue(String value) { - this.values.add(value); - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof MultiValueParameter)) { - return false; - } else { - MultiValueParameter that = (MultiValueParameter)other; - return this.values.equals(that.values) && this.name.equals(that.name); - } - } - - @Override - public int hashCode() { - return (this.name == null ? 0 : this.name.hashCode()) ^ - (this.values == null ? 0 : this.values.hashCode()); - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import java.util.ArrayList; +import java.util.List; + +/** + * The {@code MultiValueParameter} class represents a parameter containing multiple values that is passed as part of a definition + * of a modular input instance. {@code MultiValueParameter} objects correspond to XML fragments of the form: + * + * <pre> + * {@code + * <param_list name="multiValue"> + * <value>value1</value> + * <value>value2</value> + * </param_list> + * } + * </pre> + */ +public class MultiValueParameter extends Parameter { + private final String name; + private final List<String> values; + + // Note: package private constructor by design so parameters cannot be instantiated by the user. + MultiValueParameter(String name) { + this.name = name; + this.values = new ArrayList<>(); + } + + /** + * Gets the name of this parameter. + * + * @return The name of this parameter. + */ + public String getName() { + return this.name; + } + + /** + * Gets a list of all values of this parameter. + * + * @return A list of all values of this parameter. + */ + public List<String> getValues() { + return this.values; + } + + // Package private by design. + void appendValue(String value) { + this.values.add(value); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof MultiValueParameter that)) { + return false; + } else { + return this.values.equals(that.values) && this.name.equals(that.name); + } + } + + @Override + public int hashCode() { + return (this.name == null ? 0 : this.name.hashCode()) ^ + (this.values == null ? 
0 : this.values.hashCode()); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/Parameter.java b/splunk/src/main/java/com/splunk/modularinput/Parameter.java old mode 100755 new mode 100644 index 23cb8585..30b31cc3 --- a/splunk/src/main/java/com/splunk/modularinput/Parameter.java +++ b/splunk/src/main/java/com/splunk/modularinput/Parameter.java @@ -1,98 +1,98 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Node; - -import java.util.ArrayList; -import java.util.List; - -/** - * The {@code Parameter} class is a base class for parameters of modular inputs. It has two subclasses: {@code SingleValueParameter} - * and {@code MultiValueParameter}. - * - * All parameters should be constructed with the static {@code nodeToParameterList} method, which takes an XML {@code org.w3c.dom.Node} - * object as its argument and returns a list of {@code Parameter} objects, single valued or multi valued as needed. - */ -public abstract class Parameter { - public abstract String getName(); - - // Package private to enforce using the nodeToParameterList function to create Parameter objects. - Parameter() { - super(); - } - - /** - * Generates a list of {@code Parameter} objects from an {@code org.w3c.dom.Node} object containing a set of parameters. 
The node - * may be any element, but is expected to contain elements param or param_list, as in - * - * <pre> - * {@code - * <stanza name="foobar://aaa"> - * <param name="param1">value1</param> - * <param name="param2">value2</param> - * <param name="disabled">0</param> - * <param name="index">default</param> - * <param_list name="multiValue"> - * <value>value1</value> - * <value>value2</value> - * </param_list> - * </stanza> - * } - * </pre> - * - * @param node An {@code org.w3c.dom.Node} object containing the parameter list as children. - * @return A list of Parameter objects extracted from the XML. - * @throws com.splunk.modularinput.MalformedDataException If the XML does not specify a valid parameter list. - */ - public static List<Parameter> nodeToParameterList(Node node) throws MalformedDataException { - List<Parameter> parameters = new ArrayList<Parameter>(); - - for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { - if (child.getNodeType() == Node.TEXT_NODE) { - continue; - } - if ("param".equals(child.getNodeName())) { - // This is a single value parameter - String name = child.getAttributes().getNamedItem("name").getNodeValue(); - String value = XmlUtil.textInNode(child, "Element param with name=\"" + name + - "\" did not contain text."); - parameters.add(new SingleValueParameter(name, value)); - } else if ("param_list".equals(child.getNodeName())) { - String name = child.getAttributes().getNamedItem("name").getNodeValue(); - MultiValueParameter parameter = new MultiValueParameter(name); - for (Node valueNode = child.getFirstChild(); valueNode != null; valueNode = valueNode.getNextSibling()) { - if (valueNode.getNodeType() == Node.TEXT_NODE) continue; - if (!"value".equals(valueNode.getNodeName())) { - throw new MalformedDataException("Expected a value element in parameter named " + - child.getNodeName() + "; found " + valueNode.getNodeName()); - } else { - parameter.appendValue(XmlUtil.textInNode( - valueNode, - "value 
element in parameter named " + child.getNodeName() + " did not contain text." - )); - } - } - parameters.add(parameter); - } else { - throw new MalformedDataException("Bad parameter element named " + child.getNodeName()); - } - } - - return parameters; - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Node; + +import java.util.ArrayList; +import java.util.List; + +/** + * The {@code Parameter} class is a base class for parameters of modular inputs. It has two subclasses: {@code SingleValueParameter} + * and {@code MultiValueParameter}. + * + * All parameters should be constructed with the static {@code nodeToParameterList} method, which takes an XML {@code org.w3c.dom.Node} + * object as its argument and returns a list of {@code Parameter} objects, single valued or multi valued as needed. + */ +public abstract class Parameter { + public abstract String getName(); + + // Package private to enforce using the nodeToParameterList function to create Parameter objects. + Parameter() { + super(); + } + + /** + * Generates a list of {@code Parameter} objects from an {@code org.w3c.dom.Node} object containing a set of parameters. 
The node + * may be any element, but is expected to contain elements param or param_list, as in + * + * <pre> + * {@code + * <stanza name="foobar://aaa"> + * <param name="param1">value1</param> + * <param name="param2">value2</param> + * <param name="disabled">0</param> + * <param name="index">default</param> + * <param_list name="multiValue"> + * <value>value1</value> + * <value>value2</value> + * </param_list> + * </stanza> + * } + * </pre> + * + * @param node An {@code org.w3c.dom.Node} object containing the parameter list as children. + * @return A list of Parameter objects extracted from the XML. + * @throws com.splunk.modularinput.MalformedDataException If the XML does not specify a valid parameter list. + */ + public static List<Parameter> nodeToParameterList(Node node) throws MalformedDataException { + List<Parameter> parameters = new ArrayList<>(); + + for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { + if (child.getNodeType() == Node.TEXT_NODE) { + continue; + } + if ("param".equals(child.getNodeName())) { + // This is a single value parameter + String name = child.getAttributes().getNamedItem("name").getNodeValue(); + String value = XmlUtil.textInNode(child, "Element param with name=\"" + name + + "\" did not contain text."); + parameters.add(new SingleValueParameter(name, value)); + } else if ("param_list".equals(child.getNodeName())) { + String name = child.getAttributes().getNamedItem("name").getNodeValue(); + MultiValueParameter parameter = new MultiValueParameter(name); + for (Node valueNode = child.getFirstChild(); valueNode != null; valueNode = valueNode.getNextSibling()) { + if (valueNode.getNodeType() == Node.TEXT_NODE) continue; + if (!"value".equals(valueNode.getNodeName())) { + throw new MalformedDataException("Expected a value element in parameter named " + + child.getNodeName() + "; found " + valueNode.getNodeName()); + } else { + parameter.appendValue(XmlUtil.textInNode( + valueNode, + "value element 
in parameter named " + child.getNodeName() + " did not contain text." + )); + } + } + parameters.add(parameter); + } else { + throw new MalformedDataException("Bad parameter element named " + child.getNodeName()); + } + } + + return parameters; + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/Scheme.java b/splunk/src/main/java/com/splunk/modularinput/Scheme.java old mode 100755 new mode 100644 index b160c22a..086d6534 --- a/splunk/src/main/java/com/splunk/modularinput/Scheme.java +++ b/splunk/src/main/java/com/splunk/modularinput/Scheme.java @@ -1,244 +1,244 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Document; -import org.w3c.dom.Element; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.util.ArrayList; -import java.util.List; - - -/** - * The {@code Scheme} class represents the metadata for a modular input kind. - * - * A {@code Scheme} specifies a title, description, several options of how Splunk should run modular inputs of this - * kind, and a set of arguments which define a particular modular input's properties. - * - * The primary use of {@code Scheme} is to abstract away the construction of XML to feed to Splunk. 
- */ -public class Scheme { - private static DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - - public enum StreamingMode { SIMPLE, XML }; - - // Name of this module input kind. <tt>title</tt> will be used as the URL scheme when - // specifying particular modular inputs. For example, if <tt>title</tt> is <tt>"abc"</tt>, - // a particular modular input of this kind would be referenced as <tt>abc://some_name</tt>. - protected String title; - - // Human readable description of this modular input kind. - protected String description = null; - - // Should this script be called by Splunk to validate the configuration of modular inputs of this kind? - // If false, then Splunk does some basic sanity checking. - protected boolean useExternalValidation = true; - - // Should all modular inputs of this kind share a single instance of this script? - protected boolean useSingleInstance = false; - - // Will events be streamed to Splunk from this modular input in simple text or in XML? XML is the default - // and should be preferred unless you have a really good reason to choose otherwise. - protected StreamingMode streamingMode = StreamingMode.XML; - - // A List of all the arguments that this modular input kind takes. - protected List<Argument> arguments; - - public Scheme(String title) { - this.title = title; - this.arguments = new ArrayList<Argument>(); - } - - /** - * Gets the title of this modular input kind. - * - * @return The title of this modular input kind. - */ - public String getTitle() { - return title; - } - - /** - * Sets the title of this modular input kind. - * - * @param title The title of this modular input kind. - */ - public void setTitle(String title) { - this.title = title; - } - - /** - * Gets the human readable description of this modular input kind. - * - * @return The human readable description of this modular input kind. 
- */ - public String getDescription() { - return description; - } - - /** - * Sets the human readable description of this modular input kind. - * - * @param description The human readable description of this modular input kind. - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * Returns whether Splunk should use the modular input kind script to validate the arguments - * of a particular modular input or use the validation predicates specified by the arguments. - * - * @return {@code true} if Splunk should use the modular input kind script to validate the arguments - * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. - */ - public boolean isUseExternalValidation() { - return useExternalValidation; - } - - /** - * Specifies whether Splunk should use the modular input kind script to validate the arguments - * of a particular modular input (true) or use the validation predicates specified by the arguments (false). - * - * @param useExternalValidation {@code true} if Splunk should use the modular input kind script to validate the arguments - * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. - */ - public void setUseExternalValidation(boolean useExternalValidation) { - this.useExternalValidation = useExternalValidation; - } - - /** - * Returns whether Splunk should run all modular inputs of this kind via one instance of the script - * or start an instance for each modular input. - * - * @return {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, - * {@code false} if it should start an instance for each modular input. 
- */ - public boolean isUseSingleInstance() { - return useSingleInstance; - } - - /** - * Specifies whether Splunk should run all modular inputs of this kind via one instance of the script - * or start an instance for each modular input. - * - * @param useSingleInstance {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, - * {@code false} if it should start an instance for each modular input. - */ - public void setUseSingleInstance(boolean useSingleInstance) { - this.useSingleInstance = useSingleInstance; - } - - /** - * Returns whether this modular input kind will send events to Splunk as XML (the default and preferred - * value) or plain text. - * - * @return The streaming mode. - */ - public StreamingMode getStreamingMode() { - return streamingMode; - } - - /** - * Specifies whether this modular input kind will send events to Splunk as XML (the default and preferred - * value) or plain text. - * - * @param streamingMode The streaming mode. - */ - public void setStreamingMode(StreamingMode streamingMode) { - this.streamingMode = streamingMode; - } - - /** - * Returns all the arguments to this modular input kind. - * - * @return A list of all the arguments to this modular input kind. - */ - public List<Argument> getArguments() { - return arguments; - } - - /** - * Replaces the current list of arguments with the specified one. - * - * @param arguments The list of arguments with which to replace the current - * list of arguments. - */ - public void setArguments(List<Argument> arguments) { - this.arguments = new ArrayList<Argument>(arguments); - } - - /** - * Appends an argument to the arguments that this modular input kind takes. - * - * @param argument The argument to append to the arguments. - */ - public void addArgument(Argument argument) { - this.arguments.add(argument); - } - - /** - * Generates an XML encoding of this scheme to be passed to Splunk. 
- * - * @return An {@code org.w3c.dom.Document} object containing the XML of this scheme. - * @throws ParserConfigurationException If there was a problem configuring the XML libraries. - */ - Document toXml() throws ParserConfigurationException { - DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); - Document doc = documentBuilder.newDocument(); - - Element root = doc.createElement("scheme"); - doc.appendChild(root); - - Element title = doc.createElement("title"); - title.appendChild(doc.createTextNode(this.title)); - root.appendChild(title); - - if (this.description != null) { - Element description = doc.createElement("description"); - description.appendChild(doc.createTextNode(this.description)); - root.appendChild(description); - } - - Element useExternalValidation = doc.createElement("use_external_validation"); - useExternalValidation.appendChild(doc.createTextNode(Boolean.toString(this.useExternalValidation))); - root.appendChild(useExternalValidation); - - Element useSingleInstance = doc.createElement("use_single_instance"); - useSingleInstance.appendChild(doc.createTextNode(Boolean.toString(this.useSingleInstance))); - root.appendChild(useSingleInstance); - - Element streamingMode = doc.createElement("streaming_mode"); - streamingMode.appendChild(doc.createTextNode(this.streamingMode == StreamingMode.SIMPLE ? "simple" : "xml")); - root.appendChild(streamingMode); - - Element endpoint = doc.createElement("endpoint"); - root.appendChild(endpoint); - - Element args = doc.createElement("args"); - endpoint.appendChild(args); - - for (Argument arg : this.arguments) { - arg.addToDocument(doc, args); - } - - return doc; - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.util.ArrayList; +import java.util.List; + + +/** + * The {@code Scheme} class represents the metadata for a modular input kind. + * + * A {@code Scheme} specifies a title, description, several options of how Splunk should run modular inputs of this + * kind, and a set of arguments which define a particular modular input's properties. + * + * The primary use of {@code Scheme} is to abstract away the construction of XML to feed to Splunk. + */ +public class Scheme { + private static DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + + public enum StreamingMode { SIMPLE, XML }; + + // Name of this module input kind. <tt>title</tt> will be used as the URL scheme when + // specifying particular modular inputs. For example, if <tt>title</tt> is <tt>"abc"</tt>, + // a particular modular input of this kind would be referenced as <tt>abc://some_name</tt>. + protected String title; + + // Human readable description of this modular input kind. + protected String description = null; + + // Should this script be called by Splunk to validate the configuration of modular inputs of this kind? + // If false, then Splunk does some basic sanity checking. 
+ protected boolean useExternalValidation = true; + + // Should all modular inputs of this kind share a single instance of this script? + protected boolean useSingleInstance = false; + + // Will events be streamed to Splunk from this modular input in simple text or in XML? XML is the default + // and should be preferred unless you have a really good reason to choose otherwise. + protected StreamingMode streamingMode = StreamingMode.XML; + + // A List of all the arguments that this modular input kind takes. + protected List<Argument> arguments; + + public Scheme(String title) { + this.title = title; + this.arguments = new ArrayList<>(); + } + + /** + * Gets the title of this modular input kind. + * + * @return The title of this modular input kind. + */ + public String getTitle() { + return title; + } + + /** + * Sets the title of this modular input kind. + * + * @param title The title of this modular input kind. + */ + public void setTitle(String title) { + this.title = title; + } + + /** + * Gets the human readable description of this modular input kind. + * + * @return The human readable description of this modular input kind. + */ + public String getDescription() { + return description; + } + + /** + * Sets the human readable description of this modular input kind. + * + * @param description The human readable description of this modular input kind. + */ + public void setDescription(String description) { + this.description = description; + } + + /** + * Returns whether Splunk should use the modular input kind script to validate the arguments + * of a particular modular input or use the validation predicates specified by the arguments. + * + * @return {@code true} if Splunk should use the modular input kind script to validate the arguments + * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. 
+ */ + public boolean isUseExternalValidation() { + return useExternalValidation; + } + + /** + * Specifies whether Splunk should use the modular input kind script to validate the arguments + * of a particular modular input (true) or use the validation predicates specified by the arguments (false). + * + * @param useExternalValidation {@code true} if Splunk should use the modular input kind script to validate the arguments + * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. + */ + public void setUseExternalValidation(boolean useExternalValidation) { + this.useExternalValidation = useExternalValidation; + } + + /** + * Returns whether Splunk should run all modular inputs of this kind via one instance of the script + * or start an instance for each modular input. + * + * @return {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, + * {@code false} if it should start an instance for each modular input. + */ + public boolean isUseSingleInstance() { + return useSingleInstance; + } + + /** + * Specifies whether Splunk should run all modular inputs of this kind via one instance of the script + * or start an instance for each modular input. + * + * @param useSingleInstance {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, + * {@code false} if it should start an instance for each modular input. + */ + public void setUseSingleInstance(boolean useSingleInstance) { + this.useSingleInstance = useSingleInstance; + } + + /** + * Returns whether this modular input kind will send events to Splunk as XML (the default and preferred + * value) or plain text. + * + * @return The streaming mode. + */ + public StreamingMode getStreamingMode() { + return streamingMode; + } + + /** + * Specifies whether this modular input kind will send events to Splunk as XML (the default and preferred + * value) or plain text. 
+ * + * @param streamingMode The streaming mode. + */ + public void setStreamingMode(StreamingMode streamingMode) { + this.streamingMode = streamingMode; + } + + /** + * Returns all the arguments to this modular input kind. + * + * @return A list of all the arguments to this modular input kind. + */ + public List<Argument> getArguments() { + return arguments; + } + + /** + * Replaces the current list of arguments with the specified one. + * + * @param arguments The list of arguments with which to replace the current + * list of arguments. + */ + public void setArguments(List<Argument> arguments) { + this.arguments = new ArrayList<>(arguments); + } + + /** + * Appends an argument to the arguments that this modular input kind takes. + * + * @param argument The argument to append to the arguments. + */ + public void addArgument(Argument argument) { + this.arguments.add(argument); + } + + /** + * Generates an XML encoding of this scheme to be passed to Splunk. + * + * @return An {@code org.w3c.dom.Document} object containing the XML of this scheme. + * @throws ParserConfigurationException If there was a problem configuring the XML libraries. 
+ */ + Document toXml() throws ParserConfigurationException { + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + Document doc = documentBuilder.newDocument(); + + Element root = doc.createElement("scheme"); + doc.appendChild(root); + + Element title = doc.createElement("title"); + title.appendChild(doc.createTextNode(this.title)); + root.appendChild(title); + + if (this.description != null) { + Element description = doc.createElement("description"); + description.appendChild(doc.createTextNode(this.description)); + root.appendChild(description); + } + + Element useExternalValidation = doc.createElement("use_external_validation"); + useExternalValidation.appendChild(doc.createTextNode(Boolean.toString(this.useExternalValidation))); + root.appendChild(useExternalValidation); + + Element useSingleInstance = doc.createElement("use_single_instance"); + useSingleInstance.appendChild(doc.createTextNode(Boolean.toString(this.useSingleInstance))); + root.appendChild(useSingleInstance); + + Element streamingMode = doc.createElement("streaming_mode"); + streamingMode.appendChild(doc.createTextNode(this.streamingMode == StreamingMode.SIMPLE ? "simple" : "xml")); + root.appendChild(streamingMode); + + Element endpoint = doc.createElement("endpoint"); + root.appendChild(endpoint); + + Element args = doc.createElement("args"); + endpoint.appendChild(args); + + for (Argument arg : this.arguments) { + arg.addToDocument(doc, args); + } + + return doc; + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java b/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java old mode 100755 new mode 100644 index 1874a32e..2dfd6e7a --- a/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java +++ b/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java @@ -1,121 +1,120 @@ -/* - * Copyright 2013 Splunk, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -/** - * The {@code SingleValueParameter} class represents a parameter as part of a modular input instance that - * contains only a single value. This corresponds to XML fragments of the form: - * - * <pre> - * {@code - * <param name="param1">value11</param> - * } - * </pre> - */ -public class SingleValueParameter extends Parameter { - private final String name; - private final String value; - - // Package private by design. - SingleValueParameter(String name, String value) { - this.name = name; - this.value = value; - } - - /** - * @return the name of this parameter. - */ - public String getName() { - return this.name; - } - - /** - * Returns the parameter as found (as a String), without trying to coerce it to another type. - * - * If your field is Boolean or numeric, use {@code getBoolean} or one of {@code getInt}, {@code getLong}, - * {@code getFloat}, and {@code getDouble} instead. - * - * @return The value of this parameter as a String. - */ - public String getValue() { - return this.value; - } - - /** - * Tries to coerce the value of this parameter to a Boolean. A range of values (true, t, on, 1, y, yes) are - * interpreted as {@code true}, and a similar range (false, f, off, 0, no, n) as {@code false}. Everything - * else, including null, results in a {@code MalformedDataException}. - * - * @return The value of this parameter coerced to a Boolean. 
- * @throws MalformedDataException If the value cannot be coerced to a boolean. - */ - public boolean getBoolean() throws MalformedDataException { - return XmlUtil.normalizeBoolean(getValue()); - } - - /** - * Coerces the value of this field to an int. - * - * @return An int parsed from this parameter's value. - */ - public int getInt() { - return Integer.parseInt(getValue()); - } - - /** - * Coerces the value of this field to a long. - * - * @return A long parsed from this parameter's value. - */ - public long getLong() { - return Long.parseLong(getValue()); - } - - /** - * Coerces the value of this field to a float. - * - * @return A float parsed from this parameter's value. - */ - public float getFloat() { - return Float.parseFloat(getValue()); - } - - /** - * Coerces the value of this field to a double. - * - * @return A double parsed from this parameter's value. - */ - public double getDouble() { - return Double.parseDouble(getValue()); - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof SingleValueParameter)) { - return false; - } else { - SingleValueParameter that = (SingleValueParameter)other; - return this.getValue().equals(that.getValue()) && this.getName().equals(that.getName()); - } - } - - @Override - public int hashCode() { - return (this.name == null ? 0 : this.name.hashCode()) ^ - (this.value == null ? 0 : this.value.hashCode()); - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +/** + * The {@code SingleValueParameter} class represents a parameter as part of a modular input instance that + * contains only a single value. This corresponds to XML fragments of the form: + * + * <pre> + * {@code + * <param name="param1">value11</param> + * } + * </pre> + */ +public class SingleValueParameter extends Parameter { + private final String name; + private final String value; + + // Package private by design. + SingleValueParameter(String name, String value) { + this.name = name; + this.value = value; + } + + /** + * @return the name of this parameter. + */ + public String getName() { + return this.name; + } + + /** + * Returns the parameter as found (as a String), without trying to coerce it to another type. + * + * If your field is Boolean or numeric, use {@code getBoolean} or one of {@code getInt}, {@code getLong}, + * {@code getFloat}, and {@code getDouble} instead. + * + * @return The value of this parameter as a String. + */ + public String getValue() { + return this.value; + } + + /** + * Tries to coerce the value of this parameter to a Boolean. A range of values (true, t, on, 1, y, yes) are + * interpreted as {@code true}, and a similar range (false, f, off, 0, no, n) as {@code false}. Everything + * else, including null, results in a {@code MalformedDataException}. + * + * @return The value of this parameter coerced to a Boolean. + * @throws MalformedDataException If the value cannot be coerced to a boolean. + */ + public boolean getBoolean() throws MalformedDataException { + return XmlUtil.normalizeBoolean(getValue()); + } + + /** + * Coerces the value of this field to an int. + * + * @return An int parsed from this parameter's value. + */ + public int getInt() { + return Integer.parseInt(getValue()); + } + + /** + * Coerces the value of this field to a long. 
+ * + * @return A long parsed from this parameter's value. + */ + public long getLong() { + return Long.parseLong(getValue()); + } + + /** + * Coerces the value of this field to a float. + * + * @return A float parsed from this parameter's value. + */ + public float getFloat() { + return Float.parseFloat(getValue()); + } + + /** + * Coerces the value of this field to a double. + * + * @return A double parsed from this parameter's value. + */ + public double getDouble() { + return Double.parseDouble(getValue()); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof SingleValueParameter that)) { + return false; + } else { + return this.getValue().equals(that.getValue()) && this.getName().equals(that.getName()); + } + } + + @Override + public int hashCode() { + return (this.name == null ? 0 : this.name.hashCode()) ^ + (this.value == null ? 0 : this.value.hashCode()); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java b/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java old mode 100755 new mode 100644 index 3ce60106..dc6122f8 --- a/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java +++ b/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java @@ -1,239 +1,238 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk.modularinput; - -import org.w3c.dom.Document; -import org.w3c.dom.Node; -import org.xml.sax.SAXException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * The {@code ValidationDefinition} class represents the XML sent by Splunk for external validation of a new modular input. - */ -public class ValidationDefinition { - private Map<String, String> metadata; - - private Map<String, Parameter> parameters; - - private final String serverHostField = "server_host"; - private final String serverUriField = "server_uri"; - private final String checkpointDirField = "checkpoint_dir"; - private final String sessionKeyField = "session_key"; - private final String nameField = "name"; - - // Package private on purpose. - ValidationDefinition() { - super(); - metadata = new HashMap<String, String>(); - } - - /** - * Sets the name of the server on which this modular input is being run. - * - * @param serverHost The name of the server on which this modular input is being run. - */ - void setServerHost(String serverHost) { - this.metadata.put(serverHostField, serverHost); - } - - /** - * Gets the name of the server on which this modular input is being run. - * - * @return The name of the server on which this modular input is being run. - */ - public String getServerHost() { - return this.metadata.get(serverHostField); - } - - /** - * Sets the URI to reach the server on which this modular input is being run. - * - * @param serverUri The URI to reach the server on which this modular input is being run. - */ - void setServerUri(String serverUri) { - this.metadata.put(serverUriField, serverUri); - } - - /** - * Gets the URI to the server on which this modular input is being run. 
- * - * @return The URI to the server on which this modular input is being run. - */ - public String getServerUri() { - return this.metadata.get(serverUriField); - } - - /** - * Sets the path to write checkpoint files in. - * - * @param checkpointDir The path to write checkpoint files in. - */ - void setCheckpointDir(String checkpointDir) { - this.metadata.put(checkpointDirField, checkpointDir); - } - - /** - * Gets the path to write checkpoint files for restarting inputs in. - * - * @return The path to write checkpoint files for restarting inputs in. - */ - public String getCheckpointDir() { - return this.metadata.get(checkpointDirField); - } - - /** - * Sets a session key that can be used to access splunkd's REST API. - * - * @param sessionKey A session key that can be used to access splunkd's REST API. - */ - void setSessionKey(String sessionKey) { - this.metadata.put(sessionKeyField, sessionKey); - } - - /** - * Gets a session key providing access to splunkd's REST API on this host. - * - * @return A session key providing access to splunkd's REST API on this host. - */ - public String getSessionKey() { - return this.metadata.get(sessionKeyField); - } - - /** - * Sets the name of the proposed modular input instance. - * - * @param name The name of the proposed modular input instance. - */ - void setName(String name) { - this.metadata.put(nameField, name); - } - - /** - * Gets the name of the proposed modular input instance. - * - * @return The name of the proposed modular input instance. - */ - public String getName() { - return this.metadata.get(nameField); - } - - /** - * Sets a list of {@code Parameter} objects giving the proposed configuration. - * - * @param parameters A list of {@code Parameter} objects giving the proposed configuration. 
- */ - public void setParameters(Collection<Parameter> parameters) { - Map<String, Parameter> paramMap = new HashMap<String, Parameter>(); - for (Parameter p : parameters) { - paramMap.put(p.getName(), p); - } - this.parameters = paramMap; - } - - /** - * @return The parameters on the proposed input. - */ - public Map<String, Parameter> getParameters() { - return this.parameters; - } - - /** - * Create a ValidationDefinition from a provided stream containing XML. The XML typically will look like - * - * <pre> - * {@code - * <items> - * <server_host>myHost</server_host> - * <server_uri>https://127.0.0.1:8089</server_uri> - * <session_key>123102983109283019283</session_key> - * <checkpoint_dir>/opt/splunk/var/lib/splunk/modinputs</checkpoint_dir> - * <item name="myScheme"> - * <param name="param1">value1</param> - * <param_list name="param2"> - * <value>value2</value> - * <value>value3</value> - * <value>value4</value> - * </param_list> - * </item> - * </items> - * } - * </pre> - * - * @param stream containing XML to parse. - * @return a ValidationDefinition. - * @throws ParserConfigurationException if there are errors in setting up the parser (which indicates system - * configuration issues). - * @throws IOException if there is an error in reading from the stream. - * @throws SAXException when the XML is invalid. - * @throws MalformedDataException when the XML does not meet the required schema. 
- */ - public static ValidationDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, - IOException, SAXException, MalformedDataException { - DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - documentBuilderFactory.setIgnoringElementContentWhitespace(true); - documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); - documentBuilderFactory.setExpandEntityReferences(false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); - DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); - Document doc = documentBuilder.parse(stream); - - ValidationDefinition definition = new ValidationDefinition(); - for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { - if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE) { - continue; - } else if (node.getNodeName().equals("item")) { - String name = node.getAttributes().getNamedItem("name").getNodeValue(); - definition.setName(name); - - List<Parameter> parameter = Parameter.nodeToParameterList(node); - definition.setParameters(parameter); - } else { - definition.metadata.put( - node.getNodeName(), - XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) - ); - } - } - - return definition; - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof ValidationDefinition)) { - return false; - } - ValidationDefinition that = (ValidationDefinition)other; - return this.metadata.equals(that.metadata) && this.parameters.equals(that.parameters); - } - - @Override - public int hashCode() { - return this.metadata.hashCode() ^ (this.parameters == null ? 0 : this.parameters.hashCode()); - } - -} +/* + * Copyright 2013 Splunk, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * The {@code ValidationDefinition} class represents the XML sent by Splunk for external validation of a new modular input. + */ +public class ValidationDefinition { + private Map<String, String> metadata; + + private Map<String, Parameter> parameters; + + private final String serverHostField = "server_host"; + private final String serverUriField = "server_uri"; + private final String checkpointDirField = "checkpoint_dir"; + private final String sessionKeyField = "session_key"; + private final String nameField = "name"; + + // Package private on purpose. + ValidationDefinition() { + super(); + metadata = new HashMap<>(); + } + + /** + * Sets the name of the server on which this modular input is being run. + * + * @param serverHost The name of the server on which this modular input is being run. 
+ */ + void setServerHost(String serverHost) { + this.metadata.put(serverHostField, serverHost); + } + + /** + * Gets the name of the server on which this modular input is being run. + * + * @return The name of the server on which this modular input is being run. + */ + public String getServerHost() { + return this.metadata.get(serverHostField); + } + + /** + * Sets the URI to reach the server on which this modular input is being run. + * + * @param serverUri The URI to reach the server on which this modular input is being run. + */ + void setServerUri(String serverUri) { + this.metadata.put(serverUriField, serverUri); + } + + /** + * Gets the URI to the server on which this modular input is being run. + * + * @return The URI to the server on which this modular input is being run. + */ + public String getServerUri() { + return this.metadata.get(serverUriField); + } + + /** + * Sets the path to write checkpoint files in. + * + * @param checkpointDir The path to write checkpoint files in. + */ + void setCheckpointDir(String checkpointDir) { + this.metadata.put(checkpointDirField, checkpointDir); + } + + /** + * Gets the path to write checkpoint files for restarting inputs in. + * + * @return The path to write checkpoint files for restarting inputs in. + */ + public String getCheckpointDir() { + return this.metadata.get(checkpointDirField); + } + + /** + * Sets a session key that can be used to access splunkd's REST API. + * + * @param sessionKey A session key that can be used to access splunkd's REST API. + */ + void setSessionKey(String sessionKey) { + this.metadata.put(sessionKeyField, sessionKey); + } + + /** + * Gets a session key providing access to splunkd's REST API on this host. + * + * @return A session key providing access to splunkd's REST API on this host. + */ + public String getSessionKey() { + return this.metadata.get(sessionKeyField); + } + + /** + * Sets the name of the proposed modular input instance. 
+ * + * @param name The name of the proposed modular input instance. + */ + void setName(String name) { + this.metadata.put(nameField, name); + } + + /** + * Gets the name of the proposed modular input instance. + * + * @return The name of the proposed modular input instance. + */ + public String getName() { + return this.metadata.get(nameField); + } + + /** + * Sets a list of {@code Parameter} objects giving the proposed configuration. + * + * @param parameters A list of {@code Parameter} objects giving the proposed configuration. + */ + public void setParameters(Collection<Parameter> parameters) { + Map<String, Parameter> paramMap = new HashMap<>(); + for (Parameter p : parameters) { + paramMap.put(p.getName(), p); + } + this.parameters = paramMap; + } + + /** + * @return The parameters on the proposed input. + */ + public Map<String, Parameter> getParameters() { + return this.parameters; + } + + /** + * Create a ValidationDefinition from a provided stream containing XML. The XML typically will look like + * + * <pre> + * {@code + * <items> + * <server_host>myHost</server_host> + * <server_uri>https://127.0.0.1:8089</server_uri> + * <session_key>123102983109283019283</session_key> + * <checkpoint_dir>/opt/splunk/var/lib/splunk/modinputs</checkpoint_dir> + * <item name="myScheme"> + * <param name="param1">value1</param> + * <param_list name="param2"> + * <value>value2</value> + * <value>value3</value> + * <value>value4</value> + * </param_list> + * </item> + * </items> + * } + * </pre> + * + * @param stream containing XML to parse. + * @return a ValidationDefinition. + * @throws ParserConfigurationException if there are errors in setting up the parser (which indicates system + * configuration issues). + * @throws IOException if there is an error in reading from the stream. + * @throws SAXException when the XML is invalid. + * @throws MalformedDataException when the XML does not meet the required schema. 
+ */ + public static ValidationDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, + IOException, SAXException, MalformedDataException { + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setIgnoringElementContentWhitespace(true); + documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + documentBuilderFactory.setExpandEntityReferences(false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + Document doc = documentBuilder.parse(stream); + + ValidationDefinition definition = new ValidationDefinition(); + for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { + if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE) { + continue; + } else if (node.getNodeName().equals("item")) { + String name = node.getAttributes().getNamedItem("name").getNodeValue(); + definition.setName(name); + + List<Parameter> parameter = Parameter.nodeToParameterList(node); + definition.setParameters(parameter); + } else { + definition.metadata.put( + node.getNodeName(), + XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) + ); + } + } + + return definition; + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof ValidationDefinition that)) { + return false; + } + return this.metadata.equals(that.metadata) && this.parameters.equals(that.parameters); + } + + @Override + public int hashCode() { + return this.metadata.hashCode() ^ (this.parameters == null ? 
0 : this.parameters.hashCode()); + } + +} From 3a198bdd6b1a57b4be956f8b8ba3775c7fff2787 Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Fri, 4 Aug 2023 14:27:05 +0530 Subject: [PATCH 05/12] Sonarlint fixes and GH action version upgrades --- .github/workflows/test.yml | 6 ++-- .../java/com/splunk/ResultsReaderXml.java | 1 - splunk/src/main/java/com/splunk/Service.java | 28 ++++++++----------- .../java/com/splunk/SimpleCookieStore.java | 1 - .../java/com/splunk/TimestampBinning.java | 2 +- splunk/src/main/java/com/splunk/UdpInput.java | 14 +++++----- splunk/src/main/java/com/splunk/Util.java | 1 - splunk/src/main/java/com/splunk/Value.java | 4 +-- .../java/com/splunk/modularinput/Script.java | 4 +-- .../modularinput/ValidationDefinition.java | 10 +++---- 10 files changed, 32 insertions(+), 39 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index fe8311e8..2e08012c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -35,14 +35,14 @@ jobs: - 10668:10668/udp steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up JDK - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: java-version: 17 - name: Cache local Maven repository - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} diff --git a/splunk/src/main/java/com/splunk/ResultsReaderXml.java b/splunk/src/main/java/com/splunk/ResultsReaderXml.java index a41e83b7..ce7c8d0a 100644 --- a/splunk/src/main/java/com/splunk/ResultsReaderXml.java +++ b/splunk/src/main/java/com/splunk/ResultsReaderXml.java @@ -22,7 +22,6 @@ import javax.xml.stream.events.StartElement; import javax.xml.stream.events.XMLEvent; import java.io.*; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; diff --git a/splunk/src/main/java/com/splunk/Service.java 
b/splunk/src/main/java/com/splunk/Service.java index a655c02e..453bbc99 100644 --- a/splunk/src/main/java/com/splunk/Service.java +++ b/splunk/src/main/java/com/splunk/Service.java @@ -290,29 +290,25 @@ public String fullpath(String path, Args namespace) { String localSharing = ""; // override with invocation namespace if set. - if (namespace != null) { - // URL encode the owner and app. - if (namespace.containsKey("app")) { - try { + try{ + if (namespace != null) { + // URL encode the owner and app. + if (namespace.containsKey("app")) { localApp = URLEncoder.encode((String)namespace.get("app"), "UTF-8"); - } catch (UnsupportedEncodingException e) { - // This is unreachable, since UTF-8 is always supported. - assert false; } - } - if (namespace.containsKey("owner")) { - try { + if (namespace.containsKey("owner")) { localOwner = URLEncoder.encode((String)namespace.get("owner"), "UTF-8"); - } catch (UnsupportedEncodingException e) { - // This is unreachable, since UTF-8 is always supported. - assert false; + } + if (namespace.containsKey("sharing")) { + localSharing = (String)namespace.get("sharing"); } } - if (namespace.containsKey("sharing")) { - localSharing = (String)namespace.get("sharing"); - } + }catch (UnsupportedEncodingException e) { + // This is unreachable, since UTF-8 is always supported. + assert false; } + // sharing, if set calls for special mapping, override here. // "user" --> {user}/{app} // "app" --> nobody/{app} diff --git a/splunk/src/main/java/com/splunk/SimpleCookieStore.java b/splunk/src/main/java/com/splunk/SimpleCookieStore.java index 0dc12d2f..13e6f2ea 100644 --- a/splunk/src/main/java/com/splunk/SimpleCookieStore.java +++ b/splunk/src/main/java/com/splunk/SimpleCookieStore.java @@ -21,7 +21,6 @@ import java.net.HttpCookie; import java.util.Map; import java.util.HashMap; -import java.lang.StringBuilder; /** * The {@code SimpleCookieStore} class stores cookies for authentication. 
diff --git a/splunk/src/main/java/com/splunk/TimestampBinning.java b/splunk/src/main/java/com/splunk/TimestampBinning.java index de3cfe04..d0dbc55f 100644 --- a/splunk/src/main/java/com/splunk/TimestampBinning.java +++ b/splunk/src/main/java/com/splunk/TimestampBinning.java @@ -40,4 +40,4 @@ public enum TimestampBinning { SECOND { public String toString() { return "second"; } } -}; +} diff --git a/splunk/src/main/java/com/splunk/UdpInput.java b/splunk/src/main/java/com/splunk/UdpInput.java index baa6ff7d..1b5b440c 100644 --- a/splunk/src/main/java/com/splunk/UdpInput.java +++ b/splunk/src/main/java/com/splunk/UdpInput.java @@ -260,12 +260,12 @@ public void setSourceType(String sourcetype) { * @throws IOException The IOException instance */ public void submit(String eventBody) throws IOException { - DatagramSocket socket = new DatagramSocket(); - InetAddress address = InetAddress.getByName(this.service.getHost()); - int port = this.getPort(); - byte[] buffer = eventBody.getBytes("UTF-8"); - DatagramPacket packet = new DatagramPacket(buffer, buffer.length, address, port); - socket.send(packet); - socket.close(); + try(DatagramSocket socket = new DatagramSocket()){ + InetAddress address = InetAddress.getByName(this.service.getHost()); + int port = this.getPort(); + byte[] buffer = eventBody.getBytes("UTF-8"); + DatagramPacket packet = new DatagramPacket(buffer, buffer.length, address, port); + socket.send(packet); + } } } diff --git a/splunk/src/main/java/com/splunk/Util.java b/splunk/src/main/java/com/splunk/Util.java index dc9b9d10..f11c6bc1 100644 --- a/splunk/src/main/java/com/splunk/Util.java +++ b/splunk/src/main/java/com/splunk/Util.java @@ -16,7 +16,6 @@ package com.splunk; -import java.io.File; import java.util.Arrays; import java.util.List; diff --git a/splunk/src/main/java/com/splunk/Value.java b/splunk/src/main/java/com/splunk/Value.java index 812bfcad..a4c2d333 100644 --- a/splunk/src/main/java/com/splunk/Value.java +++ 
b/splunk/src/main/java/com/splunk/Value.java @@ -39,9 +39,9 @@ static boolean toBoolean(String value) { return false; if (value.equals("1")) return true; - if (value.toLowerCase().equals("false")) + if (value.equalsIgnoreCase("false")) return false; - if (value.toLowerCase().equals("true")) + if (value.equalsIgnoreCase("true")) return true; String message = String.format("Value error: '%s'", value); throw new RuntimeException(message); diff --git a/splunk/src/main/java/com/splunk/modularinput/Script.java b/splunk/src/main/java/com/splunk/modularinput/Script.java index 96447c73..ac470816 100755 --- a/splunk/src/main/java/com/splunk/modularinput/Script.java +++ b/splunk/src/main/java/com/splunk/modularinput/Script.java @@ -66,7 +66,7 @@ public int run(String[] args, EventWriter eventWriter, InputStream in) { streamEvents(inputDefinition, eventWriter); eventWriter.close(); return 0; - } else if (args[0].toLowerCase().equals("--scheme")) { + } else if (args[0].equalsIgnoreCase("--scheme")) { // Splunk has requested XML specifying the scheme for this modular input. Return it and exit. 
Scheme scheme = getScheme(); if (scheme == null) { @@ -76,7 +76,7 @@ public int run(String[] args, EventWriter eventWriter, InputStream in) { eventWriter.writeXmlDocument(scheme.toXml()); return 0; } - } else if (args[0].toLowerCase().equals("--validate-arguments")) { + } else if (args[0].equalsIgnoreCase("--validate-arguments")) { NonblockingInputStream stream = new NonblockingInputStream(in); ValidationDefinition validationDefinition = ValidationDefinition.parseDefinition(stream); diff --git a/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java b/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java index dc6122f8..ea282db8 100644 --- a/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java +++ b/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java @@ -38,11 +38,11 @@ public class ValidationDefinition { private Map<String, Parameter> parameters; - private final String serverHostField = "server_host"; - private final String serverUriField = "server_uri"; - private final String checkpointDirField = "checkpoint_dir"; - private final String sessionKeyField = "session_key"; - private final String nameField = "name"; + private static final String serverHostField = "server_host"; + private static final String serverUriField = "server_uri"; + private static final String checkpointDirField = "checkpoint_dir"; + private static final String sessionKeyField = "session_key"; + private static final String nameField = "name"; // Package private on purpose. 
ValidationDefinition() { From de8b5eba07a6fce2ba1807da01558b55991977d3 Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Mon, 7 Aug 2023 15:00:39 +0530 Subject: [PATCH 06/12] Sonarlint changes --- .github/workflows/release.yml | 7 ++++--- .github/workflows/test.yml | 1 + splunk/src/main/java/com/splunk/IPv4PivotFilter.java | 1 - splunk/src/main/java/com/splunk/IndexCollection.java | 2 +- splunk/src/main/java/com/splunk/Input.java | 1 - splunk/src/main/java/com/splunk/InputCollection.java | 6 ++---- splunk/src/main/java/com/splunk/InputKind.java | 2 +- splunk/src/main/java/com/splunk/JobArgs.java | 4 ++-- splunk/src/main/java/com/splunk/JobCollection.java | 5 ++--- splunk/src/main/java/com/splunk/JobEventsArgs.java | 4 ++-- splunk/src/main/java/com/splunk/JobExportArgs.java | 6 +++--- splunk/src/main/java/com/splunk/JobResultsArgs.java | 2 +- splunk/src/main/java/com/splunk/JobResultsPreviewArgs.java | 2 +- splunk/src/main/java/com/splunk/LicensePool.java | 1 - splunk/src/main/java/com/splunk/ModularInputKind.java | 4 +--- .../src/main/java/com/splunk/ModularInputKindArgument.java | 2 +- splunk/src/main/java/com/splunk/PasswordCollection.java | 2 +- splunk/src/main/java/com/splunk/PivotFilter.java | 3 --- splunk/src/main/java/com/splunk/RangePivotRowSplit.java | 1 - splunk/src/main/java/com/splunk/Receiver.java | 3 +-- splunk/src/test/java/com/splunk/UtilTest.java | 2 +- 21 files changed, 25 insertions(+), 36 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d008e42b..8d75e68e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,11 +12,12 @@ jobs: name: Java SDK Release runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: - java-version: 1.8 + java-version: 17 + distribution: oracle server-username: MAVEN_USERNAME server-password: MAVEN_PASSWORD 
server-id: splunk-artifactory diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2e08012c..f6e39ed1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -39,6 +39,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v3 with: + distribution: oracle java-version: 17 - name: Cache local Maven repository diff --git a/splunk/src/main/java/com/splunk/IPv4PivotFilter.java b/splunk/src/main/java/com/splunk/IPv4PivotFilter.java index 9fe794d4..af07fa76 100644 --- a/splunk/src/main/java/com/splunk/IPv4PivotFilter.java +++ b/splunk/src/main/java/com/splunk/IPv4PivotFilter.java @@ -19,7 +19,6 @@ import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; -import java.net.Inet4Address; /** * Represents a filter on an IPv4 valued field in a pivot. diff --git a/splunk/src/main/java/com/splunk/IndexCollection.java b/splunk/src/main/java/com/splunk/IndexCollection.java index cf98d95a..38c18453 100644 --- a/splunk/src/main/java/com/splunk/IndexCollection.java +++ b/splunk/src/main/java/com/splunk/IndexCollection.java @@ -49,7 +49,7 @@ public Index remove(String key) { "Indexes cannot be deleted via the REST API in versions " + "prior to 5.0"); } else { - return (Index)super.remove(key); + return super.remove(key); } } } diff --git a/splunk/src/main/java/com/splunk/Input.java b/splunk/src/main/java/com/splunk/Input.java index 95ca6c8d..2029e3e3 100644 --- a/splunk/src/main/java/com/splunk/Input.java +++ b/splunk/src/main/java/com/splunk/Input.java @@ -16,7 +16,6 @@ package com.splunk; -import java.util.Map; /** * The {@code Input} class represents a data input. 
This class is the base for diff --git a/splunk/src/main/java/com/splunk/InputCollection.java b/splunk/src/main/java/com/splunk/InputCollection.java index d8baf3a9..e94b2c28 100644 --- a/splunk/src/main/java/com/splunk/InputCollection.java +++ b/splunk/src/main/java/com/splunk/InputCollection.java @@ -295,13 +295,11 @@ private void refreshInputKinds() { // Iterate over all input kinds and collect all instances. for (InputKind kind : this.inputKinds) { - if (service.versionIsAtLeast("6.0.0")) { + if (service.versionIsAtLeast("6.0.0") && kind.getKind().equals("registry")) { // In Splunk 6 and later, the registry endpoint has been deprecated in favor of the new // WinRegMon modular input, but both now point to the same place. To avoid duplicates, we have // to read only one of them. - if (kind.getKind().equals("registry")) { - continue; - } + continue; } String relpath = kind.getRelativePath(); String inputs = String.format("%s/%s?count=-1", path, relpath); diff --git a/splunk/src/main/java/com/splunk/InputKind.java b/splunk/src/main/java/com/splunk/InputKind.java index f3cd62b3..f85ebe85 100644 --- a/splunk/src/main/java/com/splunk/InputKind.java +++ b/splunk/src/main/java/com/splunk/InputKind.java @@ -30,7 +30,7 @@ public class InputKind { private String relpath; private Class<? extends Input> inputClass; - private static Map<String, InputKind> knownRelpaths = new HashMap<String, InputKind>(); + private static Map<String, InputKind> knownRelpaths = new HashMap<>(); /** Unknown type of input. */ public static final InputKind Unknown = new InputKind(null, Input.class, "unknown"); diff --git a/splunk/src/main/java/com/splunk/JobArgs.java b/splunk/src/main/java/com/splunk/JobArgs.java index d0f5eaf4..8601383c 100644 --- a/splunk/src/main/java/com/splunk/JobArgs.java +++ b/splunk/src/main/java/com/splunk/JobArgs.java @@ -25,7 +25,7 @@ public class JobArgs extends Args { * Specifies how to create a job using the {@link JobCollection#create} * method. 
*/ - public static enum ExecutionMode { + public enum ExecutionMode { /** Runs a search asynchronously and returns a search job immediately.*/ NORMAL("normal"), /** Runs a search synchronously and does not return a search job until @@ -53,7 +53,7 @@ public String toString() { * Specifies how to create a job using the {@link JobCollection#create} * method. */ - public static enum SearchMode { + public enum SearchMode { /** * Searches historical data. */ diff --git a/splunk/src/main/java/com/splunk/JobCollection.java b/splunk/src/main/java/com/splunk/JobCollection.java index 661e21ff..d776f08e 100644 --- a/splunk/src/main/java/com/splunk/JobCollection.java +++ b/splunk/src/main/java/com/splunk/JobCollection.java @@ -74,9 +74,8 @@ public Job create(String query) { * @return The unique search identifier (SID). */ public Job create(String query, Map args) { - if (args != null && args.containsKey("exec_mode")) { - if (args.get("exec_mode").equals("oneshot")) - throw new RuntimeException(oneShotNotAllowed); + if (args != null && args.containsKey("exec_mode") && args.get("exec_mode").equals("oneshot")) { + throw new RuntimeException(oneShotNotAllowed); } args = Args.create(args).add("search", query); ResponseMessage response = service.post(path, args); diff --git a/splunk/src/main/java/com/splunk/JobEventsArgs.java b/splunk/src/main/java/com/splunk/JobEventsArgs.java index 233e3c1b..4268a590 100644 --- a/splunk/src/main/java/com/splunk/JobEventsArgs.java +++ b/splunk/src/main/java/com/splunk/JobEventsArgs.java @@ -25,7 +25,7 @@ public class JobEventsArgs extends Args { /** * Specifies the format for the returned output. */ - public static enum OutputMode { + public enum OutputMode { /** Returns output in Atom format. */ ATOM("atom"), /** Returns output in CSV format. */ @@ -59,7 +59,7 @@ public String toString() { * Specifies how to truncate lines to achieve the value in * {@link #setMaximumLines}. 
*/ - public static enum TruncationMode { + public enum TruncationMode { /** Use the "abstract" truncation mode.*/ ABSTRACT("abstract"), /** Use the "truncate" truncation mode.*/ diff --git a/splunk/src/main/java/com/splunk/JobExportArgs.java b/splunk/src/main/java/com/splunk/JobExportArgs.java index 5d5f8930..5d728d4a 100644 --- a/splunk/src/main/java/com/splunk/JobExportArgs.java +++ b/splunk/src/main/java/com/splunk/JobExportArgs.java @@ -25,7 +25,7 @@ public class JobExportArgs extends Args { /** * Specifies the format for the returned output. */ - public static enum OutputMode { + public enum OutputMode { /** Returns output in Atom format. */ ATOM("atom"), /** Returns output in CSV format. */ @@ -59,7 +59,7 @@ public String toString() { * Specifies how to create a job using the {@link JobCollection#create} * method. */ - public static enum SearchMode { + public enum SearchMode { /** * Searches historical data. */ @@ -98,7 +98,7 @@ public String toString() { * Specifies how to truncate lines to achieve the value in * {@link #setMaximumLines}. */ - public static enum TruncationMode { + public enum TruncationMode { /** Use the "abstract" truncation mode.*/ ABSTRACT("abstract"), /** Use the "truncate" truncation mode.*/ diff --git a/splunk/src/main/java/com/splunk/JobResultsArgs.java b/splunk/src/main/java/com/splunk/JobResultsArgs.java index a35bbd8b..5e60ecc7 100644 --- a/splunk/src/main/java/com/splunk/JobResultsArgs.java +++ b/splunk/src/main/java/com/splunk/JobResultsArgs.java @@ -25,7 +25,7 @@ public class JobResultsArgs extends Args { /** * Specifies the format for the returned output. */ - public static enum OutputMode { + public enum OutputMode { /** Returns output in Atom format. */ ATOM("atom"), /** Returns output in CSV format. 
*/ diff --git a/splunk/src/main/java/com/splunk/JobResultsPreviewArgs.java b/splunk/src/main/java/com/splunk/JobResultsPreviewArgs.java index d929f010..ce0ba673 100644 --- a/splunk/src/main/java/com/splunk/JobResultsPreviewArgs.java +++ b/splunk/src/main/java/com/splunk/JobResultsPreviewArgs.java @@ -25,7 +25,7 @@ public class JobResultsPreviewArgs extends Args { /** * Specifies the format for the returned output. */ - public static enum OutputMode { + public enum OutputMode { /** Returns output in Atom format. */ ATOM("atom"), /** Returns output in CSV format. */ diff --git a/splunk/src/main/java/com/splunk/LicensePool.java b/splunk/src/main/java/com/splunk/LicensePool.java index 527d0554..7c0e38f5 100644 --- a/splunk/src/main/java/com/splunk/LicensePool.java +++ b/splunk/src/main/java/com/splunk/LicensePool.java @@ -17,7 +17,6 @@ package com.splunk; import java.util.HashMap; -import java.util.List; import java.util.Map; /** diff --git a/splunk/src/main/java/com/splunk/ModularInputKind.java b/splunk/src/main/java/com/splunk/ModularInputKind.java index 73253990..9f5857f6 100644 --- a/splunk/src/main/java/com/splunk/ModularInputKind.java +++ b/splunk/src/main/java/com/splunk/ModularInputKind.java @@ -17,7 +17,6 @@ import java.util.HashMap; import java.util.Map; -import java.util.List; /** * The {@code ModularInputKind} class represents a particular modular input. @@ -60,8 +59,7 @@ public Map<String, ModularInputKindArgument> getArguments() { * @return The streaming mode ("xml" or "simple"). 
*/ public String getStreamingMode() { - String mode = getString("streaming_mode"); - return mode; + return getString("streaming_mode"); } /** diff --git a/splunk/src/main/java/com/splunk/ModularInputKindArgument.java b/splunk/src/main/java/com/splunk/ModularInputKindArgument.java index 6f42de3d..3ac28fa0 100644 --- a/splunk/src/main/java/com/splunk/ModularInputKindArgument.java +++ b/splunk/src/main/java/com/splunk/ModularInputKindArgument.java @@ -23,7 +23,7 @@ * specialized to represent arguments for modular input kinds. */ public class ModularInputKindArgument extends HashMap<String,String> { - public enum Type { NUMBER, STRING, BOOLEAN }; + public enum Type { NUMBER, STRING, BOOLEAN } /** * Class constructor. diff --git a/splunk/src/main/java/com/splunk/PasswordCollection.java b/splunk/src/main/java/com/splunk/PasswordCollection.java index de4faa7b..1379a404 100644 --- a/splunk/src/main/java/com/splunk/PasswordCollection.java +++ b/splunk/src/main/java/com/splunk/PasswordCollection.java @@ -116,7 +116,7 @@ public Password remove(String key) { } // Make it compatible with the old way (low-efficient) if (!key.contains(":")) { - Password password = getByUsername((String) key); + Password password = getByUsername(key); validate(); if (password == null) return null; password.remove(); diff --git a/splunk/src/main/java/com/splunk/PivotFilter.java b/splunk/src/main/java/com/splunk/PivotFilter.java index 21645ef3..ccd3f361 100644 --- a/splunk/src/main/java/com/splunk/PivotFilter.java +++ b/splunk/src/main/java/com/splunk/PivotFilter.java @@ -17,9 +17,6 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonPrimitive; - -import java.util.ArrayList; /** * Base class representing filters in pivots. 
diff --git a/splunk/src/main/java/com/splunk/RangePivotRowSplit.java b/splunk/src/main/java/com/splunk/RangePivotRowSplit.java index a6882ce4..a1e2c8f0 100644 --- a/splunk/src/main/java/com/splunk/RangePivotRowSplit.java +++ b/splunk/src/main/java/com/splunk/RangePivotRowSplit.java @@ -17,7 +17,6 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import com.google.gson.JsonPrimitive; /** * Split values of a field into rows by ranges of a numeric field. diff --git a/splunk/src/main/java/com/splunk/Receiver.java b/splunk/src/main/java/com/splunk/Receiver.java index 8ae4d826..b8afc43b 100644 --- a/splunk/src/main/java/com/splunk/Receiver.java +++ b/splunk/src/main/java/com/splunk/Receiver.java @@ -18,7 +18,6 @@ import java.io.*; import java.net.Socket; -import java.lang.StringBuilder; import java.util.ArrayList; import java.util.List; @@ -108,7 +107,7 @@ public Socket attach(String indexName, Args args) throws IOException { headers.add(String.format("Authorization: %s", service.getToken())); } headers.add(""); - headers.forEach(header -> writer.println(header)); + headers.forEach(writer::println); writer.flush(); return socket; } diff --git a/splunk/src/test/java/com/splunk/UtilTest.java b/splunk/src/test/java/com/splunk/UtilTest.java index a4a582e4..0634e8c1 100644 --- a/splunk/src/test/java/com/splunk/UtilTest.java +++ b/splunk/src/test/java/com/splunk/UtilTest.java @@ -75,7 +75,7 @@ public void testSubstringAfterFails() { @Test public void testArgs() { Args args = Args.create(); - Assert.assertTrue(args != null); + Assert.assertNotNull(args); Assert.assertTrue(args instanceof Args); Assert.assertTrue(Args.encode((String) null).equals("")); From 6b514fd7fe690dc435544606eb0b277446bd2930 Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Wed, 25 Oct 2023 17:47:16 +0530 Subject: [PATCH 07/12] Sonarlint changes --- splunk/src/main/java/com/splunk/Args.java | 370 +-- splunk/src/main/java/com/splunk/AtomFeed.java | 230 +- 
.../src/main/java/com/splunk/AtomObject.java | 496 +-- .../main/java/com/splunk/CollectionArgs.java | 354 +- splunk/src/main/java/com/splunk/Command.java | 474 ++- .../src/main/java/com/splunk/DataModel.java | 604 ++-- .../java/com/splunk/DataModelCalculation.java | 330 +- .../main/java/com/splunk/DataModelObject.java | 788 ++--- splunk/src/main/java/com/splunk/Entity.java | 986 +++--- .../java/com/splunk/EntityCollection.java | 2 +- splunk/src/main/java/com/splunk/Event.java | 428 +-- .../java/com/splunk/ExportResultsStream.java | 62 +- .../src/main/java/com/splunk/FieldType.java | 142 +- .../main/java/com/splunk/FiredAlertGroup.java | 86 +- .../src/main/java/com/splunk/HttpService.java | 1238 +++---- splunk/src/main/java/com/splunk/Index.java | 2224 ++++++------- .../main/java/com/splunk/InputCollection.java | 810 ++--- .../src/main/java/com/splunk/LicensePool.java | 344 +- .../java/com/splunk/ModularInputKind.java | 218 +- .../java/com/splunk/MultiResultsReader.java | 124 +- .../com/splunk/MultiResultsReaderJson.java | 82 +- .../com/splunk/MultiResultsReaderXml.java | 84 +- .../java/com/splunk/PasswordCollection.java | 304 +- .../java/com/splunk/PivotSpecification.java | 982 +++--- .../main/java/com/splunk/RequestMessage.java | 228 +- splunk/src/main/java/com/splunk/Resource.java | 2 +- .../java/com/splunk/ResourceCollection.java | 772 ++--- .../main/java/com/splunk/ResponseMessage.java | 160 +- .../main/java/com/splunk/ResultsReader.java | 356 +- .../java/com/splunk/ResultsReaderCsv.java | 232 +- .../java/com/splunk/ResultsReaderJson.java | 712 ++-- .../java/com/splunk/ResultsReaderXml.java | 854 ++--- .../com/splunk/SavedSearchCollectionArgs.java | 106 +- .../main/java/com/splunk/SearchResults.java | 76 +- splunk/src/main/java/com/splunk/Service.java | 2954 ++++++++--------- splunk/src/main/java/com/splunk/Settings.java | 578 ++-- .../java/com/splunk/SimpleCookieStore.java | 184 +- .../java/com/splunk/StreamIterableBase.java | 188 +- 
splunk/src/main/java/com/splunk/TcpInput.java | 624 ++-- .../splunk/modularinput/InputDefinition.java | 448 +-- .../modularinput/MultiValueParameter.java | 164 +- .../com/splunk/modularinput/Parameter.java | 196 +- .../java/com/splunk/modularinput/Scheme.java | 488 +-- .../modularinput/SingleValueParameter.java | 240 +- .../modularinput/ValidationDefinition.java | 476 +-- .../test/java/com/splunk/ApplicationTest.java | 470 +-- .../test/java/com/splunk/AtomFeedTest.java | 4 +- .../src/test/java/com/splunk/CookieTest.java | 2 +- .../test/java/com/splunk/DataModelTest.java | 2 +- .../java/com/splunk/DeploymentServerTest.java | 2 +- .../java/com/splunk/DeploymentTenantTest.java | 2 +- .../test/java/com/splunk/EventTypesTest.java | 2 +- .../com/splunk/ExportResultsReaderTest.java | 4 +- .../test/java/com/splunk/HttpServiceTest.java | 7 +- .../src/test/java/com/splunk/IndexTest.java | 5 +- .../java/com/splunk/LicenseMessageTest.java | 2 +- .../com/splunk/ModularInputKindsTest.java | 10 +- .../test/java/com/splunk/OutputGroupTest.java | 2 +- .../java/com/splunk/OutputServerTest.java | 2 +- .../java/com/splunk/OutputSyslogTest.java | 2 +- .../test/java/com/splunk/PasswordTest.java | 2 +- .../test/java/com/splunk/ReceiverTest.java | 24 +- .../java/com/splunk/ResultsReaderTest.java | 8 +- .../ResultsReaderTestFromExpectedFile.java | 10 +- .../src/test/java/com/splunk/SDKTestCase.java | 30 +- .../test/java/com/splunk/SavedSearchTest.java | 2 +- .../test/java/com/splunk/SearchJobTest.java | 15 +- .../src/test/java/com/splunk/ServiceTest.java | 4 +- splunk/src/test/java/com/splunk/UtilTest.java | 6 +- .../modularinput/InputDefinitionTest.java | 7 +- .../modularinput/ModularInputTestCase.java | 17 +- .../ValidationDefinitionTest.java | 2 +- .../test/java/com/splunk/splunk.license.xml | 33 + .../splunk_at_least_cupcake.license.xml | 33 + 74 files changed, 11281 insertions(+), 11230 deletions(-) create mode 100644 splunk/src/test/java/com/splunk/splunk.license.xml create mode 
100644 splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml diff --git a/splunk/src/main/java/com/splunk/Args.java b/splunk/src/main/java/com/splunk/Args.java index 84706ed6..f5853406 100644 --- a/splunk/src/main/java/com/splunk/Args.java +++ b/splunk/src/main/java/com/splunk/Args.java @@ -1,185 +1,185 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; - -/** - * The {@code Args} class is a helper class for working with Splunk REST API - * arguments. - * - * This extension is used mainly for encoding arguments for UTF8 transmission - * to a Splunk instance in a key=value pairing for a string, or - * {@code key=value1&key=value2 } (and so on) for an array of strings. - */ -public class Args extends LinkedHashMap<String, Object> { - - /** - * Class constructor. - */ - public Args() { super(); } - - /** - * Class constructor. Initializes a single key-value pair. - * - * @param key The key name. - * @param value The value, as a {@code String:String} or - * {@code String:String[]}. - */ - public Args(String key, Object value) { - super(); - put(key, value); - } - - /** - * Class constructor. Initializes a pre-existing hash map. - * - * @param values A set of key-value pairs. 
- */ - public Args(Map<String, Object> values) { - super(values); - } - - /** - * Adds an argument to an {@code Args} object. - * - * @param key The key name. - * @param value The value, as a {@code String:String} or - * {@code String:String[]}. - * @return This {@code Args} set. - */ - public Args add(String key, Object value) { - put(key, value); - return this; - } - - /** - * Creates a new empty instance of {@code Args}. - * - * @return The {@code Args} instance. - */ - public static Args create() { - return new Args(); - } - - /** - * Creates a new {@code Args} instance and initializes it with a single - * key-value pair. - * - * @param key The key name. - * @param value The value, as a {@code String:String} or - * {@code String:String[]}. - * @return The {@code Args} instance. - */ - public static Args create(String key, Object value) { - return new Args(key, value); - } - - /** - * Creates a new {@code Args} instance and initializes it with a - * pre-existing hash map. - * - * @param values The pre-existing hash map. - * @return The {@code Args} instance. - */ - public static Args create(Map<String, Object> values) { - return values == null ? new Args() : new Args(values); - } - - /** - * Encodes a single string with UTF8 encoding. - * - * @param value The string. - * @return The encoded string. - */ - public static String encode(String value) { - if (value == null) return ""; - String result = null; - try { - result = URLEncoder.encode(value, "UTF-8"); - } - catch (UnsupportedEncodingException e) { assert false; } - return result; - } - - /** - * Encodes a hash map of {@code String:String} or {@code String:String[]} - * into a single UTF8-encoded string. - * - * @param args The hash map. - * @return The string. - */ - public static String encode(Map<String, Object> args) { - return Args.create(args).encode(); - } - - // Encodes an argument with a list-valued argument. 
- private void - encodeValues(StringBuilder builder, String key, String[] values) { - key = encode(key); - for (String value : values) { - if (builder.length() > 0) builder.append('&'); - builder.append(key); - builder.append('='); - builder.append(encode(value)); - } - } - - /** - * Encodes an {@code Args} instance into a UTF8-encoded string. - * - * @return The UTF8-encoded string. - */ - public String encode() { - StringBuilder builder = new StringBuilder(); - for (Entry<String, Object> entry : entrySet()) { - if (builder.length() > 0) builder.append('&'); - String key = entry.getKey(); - Object value = entry.getValue(); - if (value instanceof String[] valueInst) { - encodeValues(builder, key, valueInst); - } - else { - builder.append(encode(key)); - builder.append('='); - builder.append(encode(value.toString())); - } - } - return builder.toString(); - } - - /** - * Returns the hash-map value of a specific key, or the default value if - * the key is not found. - * - * @param args The hash map. - * @param key The key to look for. - * @param defaultValue The default value, if the key is not found. - * @param <T> The class type. - * @return The value. - */ - public static <T> T - get(Map<String, Object> args, String key, T defaultValue) { - if (!args.containsKey(key)) return defaultValue; - return (T)args.get(key); - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; + +/** + * The {@code Args} class is a helper class for working with Splunk REST API + * arguments. + * + * This extension is used mainly for encoding arguments for UTF8 transmission + * to a Splunk instance in a key=value pairing for a string, or + * {@code key=value1&key=value2 } (and so on) for an array of strings. + */ +public class Args extends LinkedHashMap<String, Object> { + + /** + * Class constructor. + */ + public Args() { super(); } + + /** + * Class constructor. Initializes a single key-value pair. + * + * @param key The key name. + * @param value The value, as a {@code String:String} or + * {@code String:String[]}. + */ + public Args(String key, Object value) { + super(); + put(key, value); + } + + /** + * Class constructor. Initializes a pre-existing hash map. + * + * @param values A set of key-value pairs. + */ + public Args(Map<String, Object> values) { + super(values); + } + + /** + * Adds an argument to an {@code Args} object. + * + * @param key The key name. + * @param value The value, as a {@code String:String} or + * {@code String:String[]}. + * @return This {@code Args} set. + */ + public Args add(String key, Object value) { + put(key, value); + return this; + } + + /** + * Creates a new empty instance of {@code Args}. + * + * @return The {@code Args} instance. + */ + public static Args create() { + return new Args(); + } + + /** + * Creates a new {@code Args} instance and initializes it with a single + * key-value pair. + * + * @param key The key name. + * @param value The value, as a {@code String:String} or + * {@code String:String[]}. + * @return The {@code Args} instance. 
+ */ + public static Args create(String key, Object value) { + return new Args(key, value); + } + + /** + * Creates a new {@code Args} instance and initializes it with a + * pre-existing hash map. + * + * @param values The pre-existing hash map. + * @return The {@code Args} instance. + */ + public static Args create(Map<String, Object> values) { + return values == null ? new Args() : new Args(values); + } + + /** + * Encodes a single string with UTF8 encoding. + * + * @param value The string. + * @return The encoded string. + */ + public static String encode(String value) { + if (value == null) return ""; + String result = null; + try { + result = URLEncoder.encode(value, "UTF-8"); + } + catch (UnsupportedEncodingException e) { assert false; } + return result; + } + + /** + * Encodes a hash map of {@code String:String} or {@code String:String[]} + * into a single UTF8-encoded string. + * + * @param args The hash map. + * @return The string. + */ + public static String encode(Map<String, Object> args) { + return Args.create(args).encode(); + } + + // Encodes an argument with a list-valued argument. + private void + encodeValues(StringBuilder builder, String key, String[] values) { + key = encode(key); + for (String value : values) { + if (builder.length() > 0) builder.append('&'); + builder.append(key); + builder.append('='); + builder.append(encode(value)); + } + } + + /** + * Encodes an {@code Args} instance into a UTF8-encoded string. + * + * @return The UTF8-encoded string. 
+ */ + public String encode() { + StringBuilder builder = new StringBuilder(); + for (Entry<String, Object> entry : entrySet()) { + if (builder.length() > 0) builder.append('&'); + String key = entry.getKey(); + Object value = entry.getValue(); + if (value instanceof String[] valueInst) { + encodeValues(builder, key, valueInst); + } + else { + builder.append(encode(key)); + builder.append('='); + builder.append(encode(value.toString())); + } + } + return builder.toString(); + } + + /** + * Returns the hash-map value of a specific key, or the default value if + * the key is not found. + * + * @param args The hash map. + * @param key The key to look for. + * @param defaultValue The default value, if the key is not found. + * @param <T> The class type. + * @return The value. + */ + public static <T> T + get(Map<String, Object> args, String key, T defaultValue) { + if (!args.containsKey(key)) return defaultValue; + return (T)args.get(key); + } +} + diff --git a/splunk/src/main/java/com/splunk/AtomFeed.java b/splunk/src/main/java/com/splunk/AtomFeed.java index 81554db5..2cacb86e 100644 --- a/splunk/src/main/java/com/splunk/AtomFeed.java +++ b/splunk/src/main/java/com/splunk/AtomFeed.java @@ -1,115 +1,115 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.io.InputStream; -import java.util.*; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.XMLStreamReader; -import javax.xml.stream.XMLStreamConstants; - -/** - * The {@code AtomFeed} class represents an Atom feed. - */ -public class AtomFeed extends AtomObject { - /** The list of Atom entries contained in this {@code AtomFeed} object. */ - public ArrayList<AtomEntry> entries = new ArrayList<>(); - - /** The value of the Atom feed's {@code <itemsPerPage>} element. */ - public String itemsPerPage = null; - - /** The value of the Atom feed's {@code <startIndex>} element. */ - public String startIndex = null; - - /** The value of the Atom feed's {@code <totalResults>} element. */ - public String totalResults = null; - - /** - * Creates a new {@code AtomFeed} instance. - * - * @return A new {@code AtomFeed} instance. - */ - static AtomFeed create() { - return new AtomFeed(); - } - - /** - * Creates a new {@code AtomFeed} instance based on the given stream. - * - * @param input The input stream. - * @return An {@code AtomFeed} instance representing the parsed stream. - */ - public static AtomFeed parseStream(InputStream input) { - XMLStreamReader reader = createReader(input); - - AtomFeed result = AtomFeed.parse(reader); - - try { - reader.close(); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - - return result; - } - - /** - * Creates a new {@code AtomFeed} instance based on a given XML element. - * - * @param input The XML stream. - * @return An {@code AtomFeed} instance representing the parsed element. - * @throws RuntimeException The runtime exception if a parse error occurs. - */ - static AtomFeed parse(XMLStreamReader input) { - AtomFeed feed = AtomFeed.create(); - feed.load(input, "feed"); - return feed; - } - - /** - * Initializes the current instance from a given XML element. - * - * @param reader The XML reader. 
- */ - @Override void init(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - if (name.equals("entry")) { - AtomEntry entry = AtomEntry.parse(reader); - this.entries.add(entry); - } - else if (name.equals("messages")) { - parseEnd(reader); - } - else if (name.equals("totalResults")) { - this.totalResults = parseText(reader); - } - else if (name.equals("itemsPerPage")) { - this.itemsPerPage = parseText(reader); - } - else if (name.equals("startIndex")) { - this.startIndex = parseText(reader); - } - else { - super.init(reader); - } - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.InputStream; +import java.util.*; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import javax.xml.stream.XMLStreamConstants; + +/** + * The {@code AtomFeed} class represents an Atom feed. + */ +public class AtomFeed extends AtomObject { + /** The list of Atom entries contained in this {@code AtomFeed} object. */ + public ArrayList<AtomEntry> entries = new ArrayList<>(); + + /** The value of the Atom feed's {@code <itemsPerPage>} element. */ + public String itemsPerPage = null; + + /** The value of the Atom feed's {@code <startIndex>} element. */ + public String startIndex = null; + + /** The value of the Atom feed's {@code <totalResults>} element. 
*/ + public String totalResults = null; + + /** + * Creates a new {@code AtomFeed} instance. + * + * @return A new {@code AtomFeed} instance. + */ + static AtomFeed create() { + return new AtomFeed(); + } + + /** + * Creates a new {@code AtomFeed} instance based on the given stream. + * + * @param input The input stream. + * @return An {@code AtomFeed} instance representing the parsed stream. + */ + public static AtomFeed parseStream(InputStream input) { + XMLStreamReader reader = createReader(input); + + AtomFeed result = AtomFeed.parse(reader); + + try { + reader.close(); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + + return result; + } + + /** + * Creates a new {@code AtomFeed} instance based on a given XML element. + * + * @param input The XML stream. + * @return An {@code AtomFeed} instance representing the parsed element. + * @throws RuntimeException The runtime exception if a parse error occurs. + */ + static AtomFeed parse(XMLStreamReader input) { + AtomFeed feed = AtomFeed.create(); + feed.load(input, "feed"); + return feed; + } + + /** + * Initializes the current instance from a given XML element. + * + * @param reader The XML reader. 
+ */ + @Override void init(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + if (name.equals("entry")) { + AtomEntry entry = AtomEntry.parse(reader); + this.entries.add(entry); + } + else if (name.equals("messages")) { + parseEnd(reader); + } + else if (name.equals("totalResults")) { + this.totalResults = parseText(reader); + } + else if (name.equals("itemsPerPage")) { + this.itemsPerPage = parseText(reader); + } + else if (name.equals("startIndex")) { + this.startIndex = parseText(reader); + } + else { + super.init(reader); + } + } +} + diff --git a/splunk/src/main/java/com/splunk/AtomObject.java b/splunk/src/main/java/com/splunk/AtomObject.java index 21ed8132..6e130096 100644 --- a/splunk/src/main/java/com/splunk/AtomObject.java +++ b/splunk/src/main/java/com/splunk/AtomObject.java @@ -1,248 +1,248 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.InputStream; -import java.util.HashMap; -import java.util.Map; -import javax.xml.stream.Location; -import javax.xml.stream.XMLInputFactory; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.XMLStreamReader; -import javax.xml.stream.XMLStreamConstants; - -/** - * The {@code AtomObject} class represents a generic Atom object. This class is - * a common base class shared by {@code AtomFeed} and {@code AtomEntry}. 
- */ -public class AtomObject { - /** The value of the Atom {@code <id>} element. */ - public String id; - - /** The value of the {@code <link>} elements in this {@code AtomObject}. */ - public Map<String, String> links = new HashMap<>(); - - /** The value of the Atom {@code <title>} element. */ - public String title; - - /** The value of the Atom {@code <updated>} element. */ - public String updated; - - /** - * Instantiates the XMLStreamReader, advances to the root element and - * validates the root document structure. This initialization code is shared - * by the {@code AtomFeed} and {@code AtomEntry} parsers. - * - * @param input The input stream. - * @return An {@code XMLStreamReader} initialized reader, advanced to the - * first element of the document. - */ - protected static XMLStreamReader createReader(InputStream input) { - XMLInputFactory factory = XMLInputFactory.newInstance(); - - // The Atom parser assumes that all adjacent text nodes are coalesced - factory.setProperty(XMLInputFactory.IS_COALESCING, true); - factory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, true); - - XMLStreamReader reader; - try { - reader = factory.createXMLStreamReader(input); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - - assert reader.getEventType() == XMLStreamConstants.START_DOCUMENT; - - // Scan ahead to first element - scanTag(reader); - - return reader; - } - - /** - * Initialize a property of the current instance based on the given XML - * element. - * - * @param reader The XML reader. 
- */ - void init(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - if (name.equals("id")) { - this.id = parseText(reader); - } - else if (name.equals("link")) { - String rel = reader.getAttributeValue(null, "rel"); - String href = reader.getAttributeValue(null, "href"); - this.links.put(rel, href); - parseEnd(reader); - } - else if (name.equals("title")) { - this.title = parseText(reader); - } - else if (name.equals("updated")) { - this.updated = parseText(reader); - } - else { - parseEnd(reader); // Ignore - } - } - - /** - * Initializes the current instance from the given XML element by calling - * the {@code init} method on each child of the XML element. - * - * @param reader The XML reader. - */ - void load(XMLStreamReader reader, String localName) { - assert isStartElement(reader, localName); - - String name = reader.getLocalName(); - - scan(reader); - while (reader.isStartElement()) { - init(reader); - } - - if (!isEndElement(reader, name)) - syntaxError(reader); - - scan(reader); // Consume the end element - } - - /** - * Parses the element at the current cursor position and reads the - * corresponding end element. - * - * @param reader The XML reader. - */ - protected void parseEnd(XMLStreamReader reader) { - scanEnd(reader); // Scan ahead to the end element - scan(reader); // Consume the end element - } - - /** - * Parses and returns the text value of the element at the current cursor - * position and reads the corresponding end element. - * - * @param reader The XML reader. - * @return The element's text value. 
- */ - protected String parseText(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - String value = getElementText(reader); - - if (!isEndElement(reader, name)) - syntaxError(reader); - - scan(reader); // Consume the end element - - return value; - } - - // - // Lexical helpers - // - - protected static String getElementText(XMLStreamReader reader) { - try { - return reader.getElementText(); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - protected static boolean - isEndElement(XMLStreamReader reader, String localName) { - return reader.isEndElement() - && reader.getLocalName().equals(localName); - } - - protected static boolean - isStartElement(XMLStreamReader reader, String localName) { - return reader.isStartElement() - && reader.getLocalName().equals(localName); - } - - // Scan ahead to the next token, skipping whitespace - protected static void scan(XMLStreamReader reader) { - assert !reader.isWhiteSpace(); // current should never be white - try { - do { - reader.next(); - } - while (reader.isWhiteSpace()); // Ignore whitespace - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - // Scan ahead to the end element that matches the current start element. - // Note: function returns cursor located at matching end element. - protected static void scanEnd(XMLStreamReader reader) { - assert reader.isStartElement(); - - String name = reader.getLocalName(); - - while (true) { - scan(reader); - - switch (reader.getEventType()) { - case XMLStreamConstants.CHARACTERS: - continue; - - case XMLStreamConstants.START_ELEMENT: - scanEnd(reader); - continue; - - case XMLStreamConstants.END_ELEMENT: - if (!reader.getLocalName().equals(name)) - syntaxError(reader); - return; - - default: - syntaxError(reader); - } - } - } - - // Scan ahead until the next start tag. 
- protected static void scanTag(XMLStreamReader reader) { - try { - reader.nextTag(); - } - catch (XMLStreamException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - // Raises a Syntax error runtime exception - protected static void syntaxError(XMLStreamReader reader) { - Location location = reader.getLocation(); - String where = location.toString(); - String message = String.format("Syntax error @ %s", where); - throw new RuntimeException(message); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import javax.xml.stream.Location; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import javax.xml.stream.XMLStreamConstants; + +/** + * The {@code AtomObject} class represents a generic Atom object. This class is + * a common base class shared by {@code AtomFeed} and {@code AtomEntry}. + */ +public class AtomObject { + /** The value of the Atom {@code <id>} element. */ + public String id; + + /** The value of the {@code <link>} elements in this {@code AtomObject}. */ + public Map<String, String> links = new HashMap<>(); + + /** The value of the Atom {@code <title>} element. */ + public String title; + + /** The value of the Atom {@code <updated>} element. 
*/ + public String updated; + + /** + * Instantiates the XMLStreamReader, advances to the root element and + * validates the root document structure. This initialization code is shared + * by the {@code AtomFeed} and {@code AtomEntry} parsers. + * + * @param input The input stream. + * @return An {@code XMLStreamReader} initialized reader, advanced to the + * first element of the document. + */ + protected static XMLStreamReader createReader(InputStream input) { + XMLInputFactory factory = XMLInputFactory.newInstance(); + + // The Atom parser assumes that all adjacent text nodes are coalesced + factory.setProperty(XMLInputFactory.IS_COALESCING, true); + factory.setProperty(XMLInputFactory.IS_REPLACING_ENTITY_REFERENCES, true); + + XMLStreamReader reader; + try { + reader = factory.createXMLStreamReader(input); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + + assert reader.getEventType() == XMLStreamConstants.START_DOCUMENT; + + // Scan ahead to first element + scanTag(reader); + + return reader; + } + + /** + * Initialize a property of the current instance based on the given XML + * element. + * + * @param reader The XML reader. + */ + void init(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + if (name.equals("id")) { + this.id = parseText(reader); + } + else if (name.equals("link")) { + String rel = reader.getAttributeValue(null, "rel"); + String href = reader.getAttributeValue(null, "href"); + this.links.put(rel, href); + parseEnd(reader); + } + else if (name.equals("title")) { + this.title = parseText(reader); + } + else if (name.equals("updated")) { + this.updated = parseText(reader); + } + else { + parseEnd(reader); // Ignore + } + } + + /** + * Initializes the current instance from the given XML element by calling + * the {@code init} method on each child of the XML element. + * + * @param reader The XML reader. 
+ */ + void load(XMLStreamReader reader, String localName) { + assert isStartElement(reader, localName); + + String name = reader.getLocalName(); + + scan(reader); + while (reader.isStartElement()) { + init(reader); + } + + if (!isEndElement(reader, name)) + syntaxError(reader); + + scan(reader); // Consume the end element + } + + /** + * Parses the element at the current cursor position and reads the + * corresponding end element. + * + * @param reader The XML reader. + */ + protected void parseEnd(XMLStreamReader reader) { + scanEnd(reader); // Scan ahead to the end element + scan(reader); // Consume the end element + } + + /** + * Parses and returns the text value of the element at the current cursor + * position and reads the corresponding end element. + * + * @param reader The XML reader. + * @return The element's text value. + */ + protected String parseText(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + String value = getElementText(reader); + + if (!isEndElement(reader, name)) + syntaxError(reader); + + scan(reader); // Consume the end element + + return value; + } + + // + // Lexical helpers + // + + protected static String getElementText(XMLStreamReader reader) { + try { + return reader.getElementText(); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + protected static boolean + isEndElement(XMLStreamReader reader, String localName) { + return reader.isEndElement() + && reader.getLocalName().equals(localName); + } + + protected static boolean + isStartElement(XMLStreamReader reader, String localName) { + return reader.isStartElement() + && reader.getLocalName().equals(localName); + } + + // Scan ahead to the next token, skipping whitespace + protected static void scan(XMLStreamReader reader) { + assert !reader.isWhiteSpace(); // current should never be white + try { + do { + reader.next(); + } + while (reader.isWhiteSpace()); // Ignore whitespace + } 
+ catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + // Scan ahead to the end element that matches the current start element. + // Note: function returns cursor located at matching end element. + protected static void scanEnd(XMLStreamReader reader) { + assert reader.isStartElement(); + + String name = reader.getLocalName(); + + while (true) { + scan(reader); + + switch (reader.getEventType()) { + case XMLStreamConstants.CHARACTERS: + continue; + + case XMLStreamConstants.START_ELEMENT: + scanEnd(reader); + continue; + + case XMLStreamConstants.END_ELEMENT: + if (!reader.getLocalName().equals(name)) + syntaxError(reader); + return; + + default: + syntaxError(reader); + } + } + } + + // Scan ahead until the next start tag. + protected static void scanTag(XMLStreamReader reader) { + try { + reader.nextTag(); + } + catch (XMLStreamException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + // Raises a Syntax error runtime exception + protected static void syntaxError(XMLStreamReader reader) { + Location location = reader.getLocation(); + String where = location.toString(); + String message = String.format("Syntax error @ %s", where); + throw new RuntimeException(message); + } +} diff --git a/splunk/src/main/java/com/splunk/CollectionArgs.java b/splunk/src/main/java/com/splunk/CollectionArgs.java index 25fd35aa..d9c32b2f 100644 --- a/splunk/src/main/java/com/splunk/CollectionArgs.java +++ b/splunk/src/main/java/com/splunk/CollectionArgs.java @@ -1,177 +1,177 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -/** - * The {@code CollectionArgs} class contains arguments for retrieving and - * listing entities from a collection, such as the number of entities to return - * and how to sort them. - */ -public class CollectionArgs extends Args { - - /** - * Indicates whether to sort entries in ascending or descending order. - */ - public static enum SortDirection { - /** Sort entries in ascending order. */ - ASC("asc"), - /** Sort entries in descending order. */ - DESC("desc"); - - private String value; - - private SortDirection(String value) { - this.value = value; - } - - /** - * @return The REST API value for this enumerated constant. - */ - public String toString() { - return this.value; - } - } - - /** - * Indicates the sorting mode for entries. - */ - public static enum SortMode { - /** - * If all values of the field are numbers, collate numerically. - * Otherwise, collate alphabetically. - */ - AUTO("auto"), - /** Collate alphabetically. */ - ALPHA("alpha"), - /** Collate alphabetically, case-sensitive. */ - ALPHA_CASE("alpha_case"), - /** Collate numerically. */ - NUM("num"); - - private String value; - - private SortMode(String value) { - this.value = value; - } - - /** - * @return The REST API value for this enumerated constant. - */ - public String toString() { - return this.value; - } - } - - /** - * Class constructor. - */ - public CollectionArgs() { super(); } - - /* BEGIN AUTOGENERATED CODE */ - - /** - * Sets the app context in which to list the collection. 
- * - * @param app - * The app context in which to list the collection. A {@code null} value indicates no app context, and a value of {@code "-"} indicates an app wildcard. - */ - public void setApp(String app) { - this.put("app", app); - } - - /** - * Sets the owner context in which to list the collection. - * - * @param owner - * The owner context in which to list the collection. A value of {@code "-"} indicates a wildcard, and a {@code null} value indicates no owner context. - */ - public void setOwner(String owner) { - this.put("owner", owner); - } - - /** - * Sets the sharing context in which to list the collection. - * - * @param sharing - * The sharing context in which to list the collection. Valid values are "user", "app", "global", and "system". - */ - public void setSharing(String sharing) { - this.put("sharing", sharing); - } - - /** - * Sets the maximum number of entries to return. - * - * @param count - * The maximum number of entries to return. To return all entries, specify 0. - */ - public void setCount(int count) { - this.put("count", count); - } - - /** - * Sets the index of the first entry to return. - * - * @param offset - * The index of the first entry to return. - */ - public void setOffset(int offset) { - this.put("offset", offset); - } - - /** - * Sets a search query to filter the response. The response matches field values against the search query. For example, "foo" matches any object that has "foo" as a substring in a field, and "field_name=field_value" restricts the match to a single field. - * - * @param search - * A search query to filter the response. - */ - public void setSearch(String search) { - this.put("search", search); - } - - /** - * Sets the direction to sort entries. - * - * @param sortDirection - * The sorting order--ascending or descending. - */ - public void setSortDirection(SortDirection sortDirection) { - this.put("sort_dir", sortDirection); - } - - /** - * Sets the field to use for sorting. 
- * - * @param sortKey - * The field to sort by. - */ - public void setSortKey(String sortKey) { - this.put("sort_key", sortKey); - } - - /** - * Sets the mode to use for sorting. - * - * @param sortMode - * The collating sequence for sorting entries. - */ - public void setSortMode(SortMode sortMode) { - this.put("sort_mode", sortMode); - } - - /* END AUTOGENERATED CODE */ -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +/** + * The {@code CollectionArgs} class contains arguments for retrieving and + * listing entities from a collection, such as the number of entities to return + * and how to sort them. + */ +public class CollectionArgs extends Args { + + /** + * Indicates whether to sort entries in ascending or descending order. + */ + public static enum SortDirection { + /** Sort entries in ascending order. */ + ASC("asc"), + /** Sort entries in descending order. */ + DESC("desc"); + + private String value; + + private SortDirection(String value) { + this.value = value; + } + + /** + * @return The REST API value for this enumerated constant. + */ + public String toString() { + return this.value; + } + } + + /** + * Indicates the sorting mode for entries. + */ + public static enum SortMode { + /** + * If all values of the field are numbers, collate numerically. + * Otherwise, collate alphabetically. + */ + AUTO("auto"), + /** Collate alphabetically. 
*/ + ALPHA("alpha"), + /** Collate alphabetically, case-sensitive. */ + ALPHA_CASE("alpha_case"), + /** Collate numerically. */ + NUM("num"); + + private String value; + + private SortMode(String value) { + this.value = value; + } + + /** + * @return The REST API value for this enumerated constant. + */ + public String toString() { + return this.value; + } + } + + /** + * Class constructor. + */ + public CollectionArgs() { super(); } + + /* BEGIN AUTOGENERATED CODE */ + + /** + * Sets the app context in which to list the collection. + * + * @param app + * The app context in which to list the collection. A {@code null} value indicates no app context, and a value of {@code "-"} indicates an app wildcard. + */ + public void setApp(String app) { + this.put("app", app); + } + + /** + * Sets the owner context in which to list the collection. + * + * @param owner + * The owner context in which to list the collection. A value of {@code "-"} indicates a wildcard, and a {@code null} value indicates no owner context. + */ + public void setOwner(String owner) { + this.put("owner", owner); + } + + /** + * Sets the sharing context in which to list the collection. + * + * @param sharing + * The sharing context in which to list the collection. Valid values are "user", "app", "global", and "system". + */ + public void setSharing(String sharing) { + this.put("sharing", sharing); + } + + /** + * Sets the maximum number of entries to return. + * + * @param count + * The maximum number of entries to return. To return all entries, specify 0. + */ + public void setCount(int count) { + this.put("count", count); + } + + /** + * Sets the index of the first entry to return. + * + * @param offset + * The index of the first entry to return. + */ + public void setOffset(int offset) { + this.put("offset", offset); + } + + /** + * Sets a search query to filter the response. The response matches field values against the search query. 
For example, "foo" matches any object that has "foo" as a substring in a field, and "field_name=field_value" restricts the match to a single field. + * + * @param search + * A search query to filter the response. + */ + public void setSearch(String search) { + this.put("search", search); + } + + /** + * Sets the direction to sort entries. + * + * @param sortDirection + * The sorting order--ascending or descending. + */ + public void setSortDirection(SortDirection sortDirection) { + this.put("sort_dir", sortDirection); + } + + /** + * Sets the field to use for sorting. + * + * @param sortKey + * The field to sort by. + */ + public void setSortKey(String sortKey) { + this.put("sort_key", sortKey); + } + + /** + * Sets the mode to use for sorting. + * + * @param sortMode + * The collating sequence for sorting entries. + */ + public void setSortMode(SortMode sortMode) { + this.put("sort_mode", sortMode); + } + + /* END AUTOGENERATED CODE */ +} diff --git a/splunk/src/main/java/com/splunk/Command.java b/splunk/src/main/java/com/splunk/Command.java index 267e3b15..c37a8d03 100644 --- a/splunk/src/main/java/com/splunk/Command.java +++ b/splunk/src/main/java/com/splunk/Command.java @@ -1,238 +1,236 @@ -/* - * Copyright 2011 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.OptionBuilder; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.apache.commons.cli.PosixParser; - -/** - * This class serves as an example and is unsupported. - * - * Processes and capture command options and arguments - */ -public class Command { - private String appName; - private Options rules = new Options(); - - // The parsed command line arguments - public String[] args = new String[0]; - - // The parsed command line options (flags) - public HashMap<String, Object> opts = new HashMap<>(); - - // Whether or not this is a help request - public Boolean help = false; - - public static final HashMap<String, Object> defaultValues = new HashMap<>(); - { - defaultValues.put("scheme", "https"); - defaultValues.put("host", "localhost"); - defaultValues.put("port", 8089); - } - - Command(String appName) { - this.appName = appName; - } - - public static Command create() { - return create(null); - } - - public static Command create(String appName) { - return new Command(appName); - } - - public static void error(String message, Object... args) { - System.err.format("Error: %s\n", String.format(message, args)); - System.exit(2); - } - - public Options getRules() { - return this.rules; - } - - // Initialize with default Splunk command options. 
- @SuppressWarnings("static-access") // OptionBuilder API requires this - public Command init() { - rules.addOption("h", "help", false, "Display this help message"); - rules.addOption(null, "host", true, "Host name (default localhost)"); - rules.addOption(OptionBuilder - .withLongOpt("port") - .hasArg(true) - .withType(Integer.class) - .create()); - rules.addOption(null, "scheme", true, "Scheme (default https)"); - rules.addOption(null, "username", true, "Username to login with"); - rules.addOption(null, "password", true, "Password to login with"); - rules.addOption(null, "app", true, "App/namespace context"); - rules.addOption(null, "owner", true, "Owner/user context"); - // This is here only for compatibility with the JavaScript SDK's .splunkrc. - rules.addOption(null, "version", true, "Version (irrelevant for Java)"); - return this; - } - - public Command addRule(String name, String description) { - rules.addOption(null, name, false, description); - return this; - } - - @SuppressWarnings("static-access") // OptionBuilder API requires this - public Command addRule(String name, Class argType, String description) { - rules.addOption( - OptionBuilder - .withLongOpt(name) - .hasArg(true) - .withType(argType) - .withDescription(description) - .create()); - return this; - } - - // Load a file of options and arguments - public Command load(String path) { - ArrayList<String> argList = new ArrayList<>(); - - try (FileReader fileReader = new FileReader(path); - BufferedReader reader = new BufferedReader(fileReader);) { - while (true) { - String line; - line = reader.readLine(); - if (line == null) - break; - if (line.startsWith("#")) - continue; - line = line.trim(); - if (line.length() == 0) - continue; - if (!line.startsWith("-")) - line = "--" + line; - argList.add(line); - } - } - catch (IOException e) { - error(e.getMessage()); - return this; - } - - parse(argList.toArray(new String[argList.size()])); - return this; - } - - // Parse the given argument vector - public 
Command parse(String[] argv) { - CommandLineParser parser = new PosixParser(); - - CommandLine cmdline = null; - try { - cmdline = parser.parse(this.rules, argv); - } - catch (ParseException e) { - error(e.getMessage()); - } - - // Unpack the cmdline into a simple Map of options and optionally - // assign values to any corresponding fields found in the Command class. - for (Option option : cmdline.getOptions()) { - String name = option.getLongOpt(); - Object value = option.getValue(); - - // Figure out the type of the option and convert the value. - if (!option.hasArg()) { - // If it has no arg, then its implicitly boolean and presence - // of the argument indicates truth. - value = true; - } - else { - Class type = (Class)option.getType(); - if (type == null) { - // Null implies String, no conversion necessary - } - else if (type == Integer.class) { - value = Integer.parseInt((String)value); - } - else { - assert false; // Unsupported type - } - } - - this.opts.put(name, value); - - // Look for a field of the Command class (or subclass) that - // matches the long name of the option and, if found, assign the - // corresponding option value in order to provide simplified - // access to command options. - try { - java.lang.reflect.Field field = this.getClass().getField(name); - field.set(this, value); - } - catch (NoSuchFieldException e) { continue; } - catch (IllegalAccessException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - String[] orig = this.args; - String[] more = cmdline.getArgs(); - this.args = new String[orig.length + more.length]; - System.arraycopy(orig, 0, this.args, 0, orig.length); - System.arraycopy(more, 0, this.args, orig.length, more.length); - - if (this.help) { - printHelp(); - System.exit(0); - } - - return this; - } - - public void printHelp() { - HelpFormatter formatter = new HelpFormatter(); - String appName = this.appName == null ? 
"App" : this.appName; - formatter.printHelp(appName, this.rules); - } - - public static Command splunk() { - return splunk(null); - } - - // Creates a command instance, initializes with the default Splunk - // command line rules and attempts to load the default options file. - public static Command splunk(String appName) { - return Command.create(appName).init().splunkrc(); - } - - // Load the default options file (.splunkrc) if it exists - public Command splunkrc() { - this.opts.putAll(defaultValues); - load(System.getProperty("user.home") + File.separator + ".splunkrc"); - return this; - } -} - +/* + * Copyright 2011 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.OptionBuilder; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.cli.PosixParser; + +/** + * This class serves as an example and is unsupported. 
+ * + * Processes and capture command options and arguments + */ +public class Command { + private String appName; + private Options rules = new Options(); + + // The parsed command line arguments + public String[] args = new String[0]; + + // The parsed command line options (flags) + public HashMap<String, Object> opts = new HashMap<>(); + + // Whether or not this is a help request + public Boolean help = false; + + public static final HashMap<String, Object> defaultValues = new HashMap<>(); + { + defaultValues.put("scheme", "https"); + defaultValues.put("host", "localhost"); + defaultValues.put("port", 8089); + } + + Command(String appName) { + this.appName = appName; + } + + public static Command create() { + return create(null); + } + + public static Command create(String appName) { + return new Command(appName); + } + + public static void error(String message, Object... args) { + System.err.format("Error: %s\n", String.format(message, args)); + System.exit(2); + } + + public Options getRules() { + return this.rules; + } + + // Initialize with default Splunk command options. + @SuppressWarnings("static-access") // OptionBuilder API requires this + public Command init() { + rules.addOption("h", "help", false, "Display this help message"); + rules.addOption(null, "host", true, "Host name (default localhost)"); + rules.addOption(OptionBuilder + .withLongOpt("port") + .hasArg(true) + .withType(Integer.class) + .create()); + rules.addOption(null, "scheme", true, "Scheme (default https)"); + rules.addOption(null, "username", true, "Username to login with"); + rules.addOption(null, "password", true, "Password to login with"); + rules.addOption(null, "app", true, "App/namespace context"); + rules.addOption(null, "owner", true, "Owner/user context"); + // This is here only for compatibility with the JavaScript SDK's .splunkrc. 
+ rules.addOption(null, "version", true, "Version (irrelevant for Java)"); + return this; + } + + public Command addRule(String name, String description) { + rules.addOption(null, name, false, description); + return this; + } + + @SuppressWarnings("static-access") // OptionBuilder API requires this + public Command addRule(String name, Class argType, String description) { + rules.addOption( + OptionBuilder + .withLongOpt(name) + .hasArg(true) + .withType(argType) + .withDescription(description) + .create()); + return this; + } + + // Load a file of options and arguments + public Command load(String path) { + ArrayList<String> argList = new ArrayList<>(); + + try (FileReader fileReader = new FileReader(path); + BufferedReader reader = new BufferedReader(fileReader);) { + while (true) { + String line; + line = reader.readLine(); + if (line == null) + break; + if (line.startsWith("#") || line.isBlank()) + continue; + line = line.trim(); + if (!line.startsWith("-")) + line = "--" + line; + argList.add(line); + } + } + catch (IOException e) { + error(e.getMessage()); + return this; + } + + parse(argList.toArray(new String[argList.size()])); + return this; + } + + // Parse the given argument vector + public Command parse(String[] argv) { + CommandLineParser parser = new PosixParser(); + + CommandLine cmdline = null; + try { + cmdline = parser.parse(this.rules, argv); + } + catch (ParseException e) { + error(e.getMessage()); + } + + // Unpack the cmdline into a simple Map of options and optionally + // assign values to any corresponding fields found in the Command class. + for (Option option : cmdline.getOptions()) { + String name = option.getLongOpt(); + Object value = option.getValue(); + + // Figure out the type of the option and convert the value. + if (!option.hasArg()) { + // If it has no arg, then its implicitly boolean and presence + // of the argument indicates truth. 
+ value = true; + } + else { + Class type = (Class)option.getType(); + if (type == null) { + // Null implies String, no conversion necessary + } + else if (type == Integer.class) { + value = Integer.parseInt((String)value); + } + else { + assert false; // Unsupported type + } + } + + this.opts.put(name, value); + + // Look for a field of the Command class (or subclass) that + // matches the long name of the option and, if found, assign the + // corresponding option value in order to provide simplified + // access to command options. + try { + java.lang.reflect.Field field = this.getClass().getField(name); + field.set(this, value); + } + catch (NoSuchFieldException e) { continue; } + catch (IllegalAccessException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + String[] orig = this.args; + String[] more = cmdline.getArgs(); + this.args = new String[orig.length + more.length]; + System.arraycopy(orig, 0, this.args, 0, orig.length); + System.arraycopy(more, 0, this.args, orig.length, more.length); + + if (this.help) { + printHelp(); + System.exit(0); + } + + return this; + } + + public void printHelp() { + HelpFormatter formatter = new HelpFormatter(); + String appName = this.appName == null ? "App" : this.appName; + formatter.printHelp(appName, this.rules); + } + + public static Command splunk() { + return splunk(null); + } + + // Creates a command instance, initializes with the default Splunk + // command line rules and attempts to load the default options file. 
+ public static Command splunk(String appName) { + return Command.create(appName).init().splunkrc(); + } + + // Load the default options file (.splunkrc) if it exists + public Command splunkrc() { + this.opts.putAll(defaultValues); + load(System.getProperty("user.home") + File.separator + ".splunkrc"); + return this; + } +} + diff --git a/splunk/src/main/java/com/splunk/DataModel.java b/splunk/src/main/java/com/splunk/DataModel.java index 7711f0b4..028e13e3 100644 --- a/splunk/src/main/java/com/splunk/DataModel.java +++ b/splunk/src/main/java/com/splunk/DataModel.java @@ -1,302 +1,302 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import com.google.gson.*; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * DataModel represents a data model on the server. Data models contain - * data model objects, which specify structured views on Splunk data. 
- */ -public class DataModel extends Entity { - private final static JsonParser jsonParser = new JsonParser(); - private final static Gson gson = new Gson(); - - private static final String ACCELERATION_LABEL = "acceleration"; - private static final String MODEL_NAME_LABEL = "modelName"; - private static final String DISPLAY_NAME_LABEL = "displayName"; - private static final String DESCRIPTION_LABEL = "description"; - private static final String RAW_JSON_LABEL = "description"; // Yes, this is insane. - - // Human readable description, as opposed to the raw JSON, which is also called 'description' - private String description; - - private Map<String, DataModelObject> objects; - private boolean accelerationEnabled; - private String earliestAcceleratedTime; - private String accelerationCronSchedule; - private boolean manualRebuilds; - - DataModel(Service service, String path) { - super(service, path); - // The data provided by the collection is incomplete. Go ahead and refresh so we don't - // have to worry about it. - this.refresh(); - } - - /** - * Returns whether there is an object of the given name in this data model. - * - * @param name Name of the object to check for. - * @return true if there is an object with that name; false otherwise. - */ - public boolean containsObject(String name) { - return this.objects.containsKey(name); - } - - /** - * Retrieve an object by name from this data model. - * - * @param name Name of the object to retrieve. - * @return a DataModelObject if there is such an object; null otherwise. - */ - public DataModelObject getObject(String name) { - return this.objects.get(name); - } - - /** - * @return a collection of all objects in this data model. - */ - public Collection<DataModelObject> getObjects() { - return Collections.unmodifiableCollection(objects.values()); - } - - /** - * Returns the tsidx namespace which holds global acceleration events for this - * data model. 
The namespace will be returned whether acceleration is enabled - * or not. - * - * @return The tsidx namespace for global acceleration of this data model. - */ - public String getAccelerationNamespace() { - // For the moment, the acceleration namespace for global acceleration of - // data models is the name of the data model. - return getName(); - } - - /** - * @return whether global acceleration is enabled for this data model. - */ - public boolean isAccelerated() { - return this.accelerationEnabled; - } - - /** - * @return A human readable description of this data model. - */ - public String getDescription() { - return this.description; - } - - /** - * @return The raw JSON describing this data model and its objects. - */ - public String getRawJson() { - return getString(RAW_JSON_LABEL); - } - - /** - * @return the human readable name of this data model. - */ - public String getDisplayName() { - return getString(DISPLAY_NAME_LABEL); - } - - @Override - Entity load(AtomObject value) { - Entity result = super.load(value); - // After loading the Atom entity as we would for any other Splunk entity, - // we have to parse the JSON description of the data model and its acceleration - // status. - parseDescription(getString(RAW_JSON_LABEL)); - parseAcceleration(getString(ACCELERATION_LABEL)); - return result; - } - - /** - * Parse the JSON returned from splunkd describing this data model. - * - * This method writes the results into fields of this object. - * - * @param input a String containing JSON. 
- */ - private void parseDescription(String input) { - objects = new HashMap<>(); - - JsonElement rootElement = jsonParser.parse(input); - - for (Entry<String, JsonElement> entry : rootElement.getAsJsonObject().entrySet()) { - if (entry.getKey().equals(MODEL_NAME_LABEL)) { - content.put(MODEL_NAME_LABEL, entry.getValue().getAsString()); - } else if (entry.getKey().equals(DISPLAY_NAME_LABEL)) { - content.put(DISPLAY_NAME_LABEL, entry.getValue().getAsString()); - } else if (entry.getKey().equals(DESCRIPTION_LABEL)) { - description = entry.getValue().getAsString(); - } else if (entry.getKey().equals("objects")) { - JsonArray objectArray = entry.getValue().getAsJsonArray(); - for (JsonElement object : objectArray) { - DataModelObject dmo = DataModelObject.parse(this, object); - objects.put(dmo.getName(), dmo); - } - } else { - // Allow new keys without complaining - } - } - } - - /** - * Parse the acceleration description from splunkd of this data model. - * - * This method writes the results into fields of this object. - * - * @param input a string containing JSON. - */ - private void parseAcceleration(String input) { - JsonElement rootElement = jsonParser.parse(input); - - for (Entry<String, JsonElement> entry : rootElement. getAsJsonObject().entrySet()) { - if (entry.getKey().equals("enabled")) { - // API is broken in 6.1. It returns 1 instead of true (but does return false). 
- if (((JsonPrimitive)entry.getValue()).isBoolean()) { - accelerationEnabled = entry.getValue().getAsBoolean(); - } else if (((JsonPrimitive)entry.getValue()).isNumber()) { - accelerationEnabled = entry.getValue().getAsInt() != 0; - } else { - throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + - " for whether acceleration is enabled."); - } - } else if (entry.getKey().equals("earliest_time")) { - earliestAcceleratedTime = entry.getValue().getAsString(); - } else if (entry.getKey().equals("cron_schedule")) { - accelerationCronSchedule = entry.getValue().getAsString(); - } else if (entry.getKey().equals("manual_rebuilds")) { - if (((JsonPrimitive)entry.getValue()).isBoolean()) { - manualRebuilds = entry.getValue().getAsBoolean(); - } else if (((JsonPrimitive)entry.getValue()).isNumber()) { - manualRebuilds = entry.getValue().getAsInt() != 0; - } else { - throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + - " for whether manual_rebuilds is enabled."); - } - } else { - // Allow new keys without complaining - } - } - } - - /** - * Enable or disable global acceleration on this data model. - * - * @param enabled true enabled, false disables. - */ - public void setAcceleration(boolean enabled) { - this.accelerationEnabled = enabled; - toUpdate.put("enabled", enabled); - } - - /** - * Return the earliest time of the window over which the data model is accelerated. - * - * Times are represented relative to now, given by a minus sign, a number, and a - * suffix indicating the time unit (e.g., "-2mon", "-1day"). - * - * @return a string representing the earliest accelerated time. - */ - public String getEarliestAcceleratedTime() { - return earliestAcceleratedTime; - } - - /** - * Set the size of the window (from the specified earliest time to now) over - * which the data model should be accelerated. 
- * - * Times are represented relative to now, given by a minus sign, a number, and a - * suffix indicating the time unit (e.g., "-2mon", "-1day"). - * - * @param earliestAcceleratedTime a string specifying a time. - */ - public void setEarliestAcceleratedTime(String earliestAcceleratedTime) { - this.earliestAcceleratedTime = earliestAcceleratedTime; - toUpdate.put("earliest_time", earliestAcceleratedTime); - } - - /** - * Return the cron schedule on which the cached data for acceleration should be - * updated. - * - * @return a string containing a crontab style schedule specification. - */ - public String getAccelerationCronSchedule() { - return accelerationCronSchedule; - } - - /** - * Set the cron schedule on which the cached data for the acceleration should - * be updated. - * - * @param accelerationCronSchedule a crontab style schedule to use. - */ - public void setAccelerationCronSchedule(String accelerationCronSchedule) { - this.accelerationCronSchedule = accelerationCronSchedule; - toUpdate.put("cron_schedule", accelerationCronSchedule); - } - - /** - * This setting prevents outdated summaries from being rebuilt by the - * 'summarize' command. - * - * @return whether manual rebuilds are enabled for this data model. - */ - public boolean isManualRebuilds() { - return this.manualRebuilds; - } - - /** - * Enable or disable manual rebuilds on this data model. - * - * @param enabled true enabled, false disables. - */ - public void setManualRebuilds(boolean enabled) { - this.manualRebuilds = enabled; - toUpdate.put("manual_rebuilds", enabled); - } - - @Override - public void update() { - // We have to do some munging on the acceleration fields to pass them as JSON - // to the server. 
- Map<String, Object> accelerationMap = new HashMap<>(); - for (String key : new String[] {"enabled", "earliest_time", "cron_schedule", "manual_rebuilds"}) { - if (toUpdate.containsKey(key)) { - accelerationMap.put(key, toUpdate.get(key)); - toUpdate.remove(key); - } - } - - if (!accelerationMap.isEmpty()) { - toUpdate.put("acceleration", gson.toJson(accelerationMap)); - } - - // Now update like we would any other entity. - super.update(); - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import com.google.gson.*; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * DataModel represents a data model on the server. Data models contain + * data model objects, which specify structured views on Splunk data. + */ +public class DataModel extends Entity { + private final static JsonParser jsonParser = new JsonParser(); + private final static Gson gson = new Gson(); + + private static final String ACCELERATION_LABEL = "acceleration"; + private static final String MODEL_NAME_LABEL = "modelName"; + private static final String DISPLAY_NAME_LABEL = "displayName"; + private static final String DESCRIPTION_LABEL = "description"; + private static final String RAW_JSON_LABEL = "description"; // Yes, this is insane. 
+ + // Human readable description, as opposed to the raw JSON, which is also called 'description' + private String description; + + private Map<String, DataModelObject> objects; + private boolean accelerationEnabled; + private String earliestAcceleratedTime; + private String accelerationCronSchedule; + private boolean manualRebuilds; + + DataModel(Service service, String path) { + super(service, path); + // The data provided by the collection is incomplete. Go ahead and refresh so we don't + // have to worry about it. + this.refresh(); + } + + /** + * Returns whether there is an object of the given name in this data model. + * + * @param name Name of the object to check for. + * @return true if there is an object with that name; false otherwise. + */ + public boolean containsObject(String name) { + return this.objects.containsKey(name); + } + + /** + * Retrieve an object by name from this data model. + * + * @param name Name of the object to retrieve. + * @return a DataModelObject if there is such an object; null otherwise. + */ + public DataModelObject getObject(String name) { + return this.objects.get(name); + } + + /** + * @return a collection of all objects in this data model. + */ + public Collection<DataModelObject> getObjects() { + return Collections.unmodifiableCollection(objects.values()); + } + + /** + * Returns the tsidx namespace which holds global acceleration events for this + * data model. The namespace will be returned whether acceleration is enabled + * or not. + * + * @return The tsidx namespace for global acceleration of this data model. + */ + public String getAccelerationNamespace() { + // For the moment, the acceleration namespace for global acceleration of + // data models is the name of the data model. + return getName(); + } + + /** + * @return whether global acceleration is enabled for this data model. 
+ */ + public boolean isAccelerated() { + return this.accelerationEnabled; + } + + /** + * @return A human readable description of this data model. + */ + public String getDescription() { + return this.description; + } + + /** + * @return The raw JSON describing this data model and its objects. + */ + public String getRawJson() { + return getString(RAW_JSON_LABEL); + } + + /** + * @return the human readable name of this data model. + */ + public String getDisplayName() { + return getString(DISPLAY_NAME_LABEL); + } + + @Override + Entity load(AtomObject value) { + Entity result = super.load(value); + // After loading the Atom entity as we would for any other Splunk entity, + // we have to parse the JSON description of the data model and its acceleration + // status. + parseDescription(getString(RAW_JSON_LABEL)); + parseAcceleration(getString(ACCELERATION_LABEL)); + return result; + } + + /** + * Parse the JSON returned from splunkd describing this data model. + * + * This method writes the results into fields of this object. + * + * @param input a String containing JSON. 
+ */ + private void parseDescription(String input) { + objects = new HashMap<>(); + + JsonElement rootElement = jsonParser.parse(input); + + for (Entry<String, JsonElement> entry : rootElement.getAsJsonObject().entrySet()) { + if (entry.getKey().equals(MODEL_NAME_LABEL)) { + content.put(MODEL_NAME_LABEL, entry.getValue().getAsString()); + } else if (entry.getKey().equals(DISPLAY_NAME_LABEL)) { + content.put(DISPLAY_NAME_LABEL, entry.getValue().getAsString()); + } else if (entry.getKey().equals(DESCRIPTION_LABEL)) { + description = entry.getValue().getAsString(); + } else if (entry.getKey().equals("objects")) { + JsonArray objectArray = entry.getValue().getAsJsonArray(); + for (JsonElement object : objectArray) { + DataModelObject dmo = DataModelObject.parse(this, object); + objects.put(dmo.getName(), dmo); + } + } else { + // Allow new keys without complaining + } + } + } + + /** + * Parse the acceleration description from splunkd of this data model. + * + * This method writes the results into fields of this object. + * + * @param input a string containing JSON. + */ + private void parseAcceleration(String input) { + JsonElement rootElement = jsonParser.parse(input); + + for (Entry<String, JsonElement> entry : rootElement. getAsJsonObject().entrySet()) { + if (entry.getKey().equals("enabled")) { + // API is broken in 6.1. It returns 1 instead of true (but does return false). 
+ if (((JsonPrimitive)entry.getValue()).isBoolean()) { + accelerationEnabled = entry.getValue().getAsBoolean(); + } else if (((JsonPrimitive)entry.getValue()).isNumber()) { + accelerationEnabled = entry.getValue().getAsInt() != 0; + } else { + throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + + " for whether acceleration is enabled."); + } + } else if (entry.getKey().equals("earliest_time")) { + earliestAcceleratedTime = entry.getValue().getAsString(); + } else if (entry.getKey().equals("cron_schedule")) { + accelerationCronSchedule = entry.getValue().getAsString(); + } else if (entry.getKey().equals("manual_rebuilds")) { + if (((JsonPrimitive)entry.getValue()).isBoolean()) { + manualRebuilds = entry.getValue().getAsBoolean(); + } else if (((JsonPrimitive)entry.getValue()).isNumber()) { + manualRebuilds = entry.getValue().getAsInt() != 0; + } else { + throw new RuntimeException("splunkd returned an unknown value " + entry.getValue().toString() + + " for whether manual_rebuilds is enabled."); + } + } else { + // Allow new keys without complaining + } + } + } + + /** + * Enable or disable global acceleration on this data model. + * + * @param enabled true enabled, false disables. + */ + public void setAcceleration(boolean enabled) { + this.accelerationEnabled = enabled; + toUpdate.put("enabled", enabled); + } + + /** + * Return the earliest time of the window over which the data model is accelerated. + * + * Times are represented relative to now, given by a minus sign, a number, and a + * suffix indicating the time unit (e.g., "-2mon", "-1day"). + * + * @return a string representing the earliest accelerated time. + */ + public String getEarliestAcceleratedTime() { + return earliestAcceleratedTime; + } + + /** + * Set the size of the window (from the specified earliest time to now) over + * which the data model should be accelerated. 
+ * + * Times are represented relative to now, given by a minus sign, a number, and a + * suffix indicating the time unit (e.g., "-2mon", "-1day"). + * + * @param earliestAcceleratedTime a string specifying a time. + */ + public void setEarliestAcceleratedTime(String earliestAcceleratedTime) { + this.earliestAcceleratedTime = earliestAcceleratedTime; + toUpdate.put("earliest_time", earliestAcceleratedTime); + } + + /** + * Return the cron schedule on which the cached data for acceleration should be + * updated. + * + * @return a string containing a crontab style schedule specification. + */ + public String getAccelerationCronSchedule() { + return accelerationCronSchedule; + } + + /** + * Set the cron schedule on which the cached data for the acceleration should + * be updated. + * + * @param accelerationCronSchedule a crontab style schedule to use. + */ + public void setAccelerationCronSchedule(String accelerationCronSchedule) { + this.accelerationCronSchedule = accelerationCronSchedule; + toUpdate.put("cron_schedule", accelerationCronSchedule); + } + + /** + * This setting prevents outdated summaries from being rebuilt by the + * 'summarize' command. + * + * @return whether manual rebuilds are enabled for this data model. + */ + public boolean isManualRebuilds() { + return this.manualRebuilds; + } + + /** + * Enable or disable manual rebuilds on this data model. + * + * @param enabled true enabled, false disables. + */ + public void setManualRebuilds(boolean enabled) { + this.manualRebuilds = enabled; + toUpdate.put("manual_rebuilds", enabled); + } + + @Override + public void update() { + // We have to do some munging on the acceleration fields to pass them as JSON + // to the server. 
+ Map<String, Object> accelerationMap = new HashMap<>(); + for (String key : new String[] {"enabled", "earliest_time", "cron_schedule", "manual_rebuilds"}) { + if (toUpdate.containsKey(key)) { + accelerationMap.put(key, toUpdate.get(key)); + toUpdate.remove(key); + } + } + + if (!accelerationMap.isEmpty()) { + toUpdate.put("acceleration", gson.toJson(accelerationMap)); + } + + // Now update like we would any other entity. + super.update(); + } +} diff --git a/splunk/src/main/java/com/splunk/DataModelCalculation.java b/splunk/src/main/java/com/splunk/DataModelCalculation.java index a8ba5afe..ff866d39 100644 --- a/splunk/src/main/java/com/splunk/DataModelCalculation.java +++ b/splunk/src/main/java/com/splunk/DataModelCalculation.java @@ -1,165 +1,165 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import java.util.*; -import java.util.Map.Entry; - -/** - * Abstract class specifying a calculation on a data model object. 
- */ -public abstract class DataModelCalculation { - private final String[] ownerLineage; - private final String calculationID; - private final Map<String, DataModelField> generatedFields; - private final String comment; - private final boolean editable; - - protected DataModelCalculation(String[] ownerLineage, String calculationID, - Map<String, DataModelField> generatedFields, String comment, boolean editable) { - this.ownerLineage = ownerLineage; - this.calculationID = calculationID; - this.generatedFields = generatedFields; - this.comment = comment; - this.editable = editable; - } - - /** - * @return the ID of this calculation. - */ - public String getCalculationID() { return this.calculationID; } - - /** - * @param fieldName Name of the field to fetch. - * @return whether this calculation generated a field of the given name. - */ - public boolean containsGeneratedField(String fieldName) { - return this.generatedFields.containsKey(fieldName); - } - - /** - * @return a collection of the fields this calculation generates. - */ - public Collection<DataModelField> getGeneratedFields() { - return Collections.unmodifiableCollection(this.generatedFields.values()); - } - - /** - * @param fieldName Name of the field to fetch. - * @return a DataModelField object. - */ - public DataModelField getGeneratedField(String fieldName) { return this.generatedFields.get(fieldName); } - - /** - * @return the comment on this calculation (if one is specified) or null. - */ - public String getComment() { return this.comment; } - - /** - * Returns the name of the object on which this calculation is defined. - * That need not be the one you accessed it from, as it may be inherited from - * another data model object. - * - * @return The name of the object on which this calculation is defined. 
- */ - public String getOwner() { return this.ownerLineage[this.ownerLineage.length-1]; } - - /** - * Return the lineage of the data model object on which this calculation is - * defined, starting with the most remote ancestor and ending with the data model object - * on which this calculation is defined. - * - * @return an array of the names of data model objects. - */ - public String[] getLineage() { return this.ownerLineage; } - - /** - * @return whether this calculation can be edited, or it is a system defined calculation. - */ - public boolean isEditable() { return this.editable; } - - static DataModelCalculation parse(JsonElement json) { - String type = null; - String calculationId = null; - List<LookupDataModelCalculation.LookupFieldMapping> lookupInputs = - new ArrayList<>(); - String comment = null; - String expression = null; - String lookupName = null; - String lookupField = null; // We need lookupField and inputField to handle the case in Splunk 6.0 - String inputField = null; // where there is only one entry, and it's not in an array. 
- String[] owner = new String[0]; // Should always be set below - boolean editable = false; - Map<String, DataModelField> outputFields = new HashMap<>(); - - String key; - for (Entry<String, JsonElement> entry : json.getAsJsonObject().entrySet()) { - key = entry.getKey(); - if (key.equals("calculationType")) { - type = entry.getValue().getAsString().toLowerCase(); - } else if (key.equals("calculationID")) { - calculationId = entry.getValue().getAsString(); - } else if (key.equals("outputFields")) { - for (JsonElement e : entry.getValue().getAsJsonArray()) { - DataModelField f = DataModelField.parse(e.getAsJsonObject()); - outputFields.put(f.getName(), f); - } - } else if (key.equals("lookupInputs")) { - for (JsonElement lookupInputJsonElement : entry.getValue().getAsJsonArray()) { - if (!(lookupInputJsonElement instanceof JsonObject lookupInputJson)) { - throw new RuntimeException("Expected a JSON object for lookupInput entry."); - } - LookupDataModelCalculation.LookupFieldMapping mapping = new LookupDataModelCalculation.LookupFieldMapping(); - mapping.inputField = lookupInputJson.get("inputField").getAsString(); - mapping.lookupField = lookupInputJson.get("lookupField").getAsString(); - lookupInputs.add(mapping); - } - } else if (key.equals("inputField")) { - inputField = entry.getValue().getAsString(); - } else if (key.equals("comment")) { - comment = entry.getValue().getAsString(); - } else if (key.equals("expression")) { - expression = entry.getValue().getAsString(); - } else if (key.equals("lookupName")) { - lookupName = entry.getValue().getAsString(); - } else if (key.equals("lookupField")) { - lookupField = entry.getValue().getAsString(); - } else if (key.equals("owner")) { - owner = entry.getValue().getAsString().split("\\."); - } else if (key.equals("editable")) { - editable = entry.getValue().getAsBoolean(); - } - } - - DataModelCalculation c; - if (type.equals("lookup")) { - c = new LookupDataModelCalculation(owner, calculationId, outputFields, comment, 
editable, lookupName, lookupInputs); - } else if (type.equals("geoip")) { - c = new GeoIPDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField); - } else if (type.equals("eval")) { - c = new EvalDataModelCalculation(owner, calculationId, outputFields, comment, editable, expression); - } else if (type.equals("rex")) { - c = new RegexpDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField, expression); - } else { - throw new IllegalStateException("Unknown calculation type: " + type); - } - - return c; - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.splunk; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +import java.util.*; +import java.util.Map.Entry; + +/** + * Abstract class specifying a calculation on a data model object. 
+ */ +public abstract class DataModelCalculation { + private final String[] ownerLineage; + private final String calculationID; + private final Map<String, DataModelField> generatedFields; + private final String comment; + private final boolean editable; + + protected DataModelCalculation(String[] ownerLineage, String calculationID, + Map<String, DataModelField> generatedFields, String comment, boolean editable) { + this.ownerLineage = ownerLineage; + this.calculationID = calculationID; + this.generatedFields = generatedFields; + this.comment = comment; + this.editable = editable; + } + + /** + * @return the ID of this calculation. + */ + public String getCalculationID() { return this.calculationID; } + + /** + * @param fieldName Name of the field to fetch. + * @return whether this calculation generated a field of the given name. + */ + public boolean containsGeneratedField(String fieldName) { + return this.generatedFields.containsKey(fieldName); + } + + /** + * @return a collection of the fields this calculation generates. + */ + public Collection<DataModelField> getGeneratedFields() { + return Collections.unmodifiableCollection(this.generatedFields.values()); + } + + /** + * @param fieldName Name of the field to fetch. + * @return a DataModelField object. + */ + public DataModelField getGeneratedField(String fieldName) { return this.generatedFields.get(fieldName); } + + /** + * @return the comment on this calculation (if one is specified) or null. + */ + public String getComment() { return this.comment; } + + /** + * Returns the name of the object on which this calculation is defined. + * That need not be the one you accessed it from, as it may be inherited from + * another data model object. + * + * @return The name of the object on which this calculation is defined. 
+ */ + public String getOwner() { return this.ownerLineage[this.ownerLineage.length-1]; } + + /** + * Return the lineage of the data model object on which this calculation is + * defined, starting with the most remote ancestor and ending with the data model object + * on which this calculation is defined. + * + * @return an array of the names of data model objects. + */ + public String[] getLineage() { return this.ownerLineage; } + + /** + * @return whether this calculation can be edited, or it is a system defined calculation. + */ + public boolean isEditable() { return this.editable; } + + static DataModelCalculation parse(JsonElement json) { + String type = null; + String calculationId = null; + List<LookupDataModelCalculation.LookupFieldMapping> lookupInputs = + new ArrayList<>(); + String comment = null; + String expression = null; + String lookupName = null; + String lookupField = null; // We need lookupField and inputField to handle the case in Splunk 6.0 + String inputField = null; // where there is only one entry, and it's not in an array. 
+ String[] owner = new String[0]; // Should always be set below + boolean editable = false; + Map<String, DataModelField> outputFields = new HashMap<>(); + + String key; + for (Entry<String, JsonElement> entry : json.getAsJsonObject().entrySet()) { + key = entry.getKey(); + if (key.equals("calculationType")) { + type = entry.getValue().getAsString().toLowerCase(); + } else if (key.equals("calculationID")) { + calculationId = entry.getValue().getAsString(); + } else if (key.equals("outputFields")) { + for (JsonElement e : entry.getValue().getAsJsonArray()) { + DataModelField f = DataModelField.parse(e.getAsJsonObject()); + outputFields.put(f.getName(), f); + } + } else if (key.equals("lookupInputs")) { + for (JsonElement lookupInputJsonElement : entry.getValue().getAsJsonArray()) { + if (!(lookupInputJsonElement instanceof JsonObject lookupInputJson)) { + throw new RuntimeException("Expected a JSON object for lookupInput entry."); + } + LookupDataModelCalculation.LookupFieldMapping mapping = new LookupDataModelCalculation.LookupFieldMapping(); + mapping.inputField = lookupInputJson.get("inputField").getAsString(); + mapping.lookupField = lookupInputJson.get("lookupField").getAsString(); + lookupInputs.add(mapping); + } + } else if (key.equals("inputField")) { + inputField = entry.getValue().getAsString(); + } else if (key.equals("comment")) { + comment = entry.getValue().getAsString(); + } else if (key.equals("expression")) { + expression = entry.getValue().getAsString(); + } else if (key.equals("lookupName")) { + lookupName = entry.getValue().getAsString(); + } else if (key.equals("lookupField")) { + lookupField = entry.getValue().getAsString(); + } else if (key.equals("owner")) { + owner = entry.getValue().getAsString().split("\\."); + } else if (key.equals("editable")) { + editable = entry.getValue().getAsBoolean(); + } + } + + DataModelCalculation c; + if (type.equals("lookup")) { + c = new LookupDataModelCalculation(owner, calculationId, outputFields, comment, 
editable, lookupName, lookupInputs); + } else if (type.equals("geoip")) { + c = new GeoIPDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField); + } else if (type.equals("eval")) { + c = new EvalDataModelCalculation(owner, calculationId, outputFields, comment, editable, expression); + } else if (type.equals("rex")) { + c = new RegexpDataModelCalculation(owner, calculationId, outputFields, comment, editable, inputField, expression); + } else { + throw new IllegalStateException("Unknown calculation type: " + type); + } + + return c; + } +} diff --git a/splunk/src/main/java/com/splunk/DataModelObject.java b/splunk/src/main/java/com/splunk/DataModelObject.java index f95d98ef..d8aade24 100644 --- a/splunk/src/main/java/com/splunk/DataModelObject.java +++ b/splunk/src/main/java/com/splunk/DataModelObject.java @@ -1,394 +1,394 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; - -import java.util.*; -import java.util.Map.Entry; - -/** - * DataModelObject represents one of the structured views in a data model. 
- */ -public class DataModelObject { - private DataModel model; - private String name; - private String[] lineage; - private String displayName; - private String parentName; - - private Map<String, DataModelField> autoextractedFields; - private Collection<DataModelConstraint> constraints; - private Map<String, DataModelCalculation> calculations; - - protected DataModelObject(DataModel model) { - this.model = model; - } - - /** - * Checks whether there is a field with the given name in this - * data model object. - * - * @param fieldName name of the field to check for. - * @return true if there is such a field; false otherwise. - */ - public boolean containsField(String fieldName) { - if (autoextractedFields.containsKey(fieldName)) { - return true; - } - for (DataModelCalculation c : calculations.values()) { - if (c.containsGeneratedField(fieldName)) { - return true; - } - } - return false; - } - - /** - * Local acceleration is tsidx acceleration of a data model object that is handled - * manually by a user. You create a job which generates an index, and then use that - * index in your pivots on the data model object. - * - * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. You - * would use it in another job by starting your search query with - * - * | tstats ... from sid={sid} | ... - * - * The tsidx index created by this job is deleted when the job is garbage collected - * by Splunk. - * - * It is the user's responsibility to manage this job, including cancelling it. - * - * @return a Job writing a tsidx index. - */ - public Job createLocalAccelerationJob() { - return createLocalAccelerationJob(null); - } - - /** - * Local acceleration is tsidx acceleration of a data model object that is handled - * manually by a user. You create a job which generates an index, and then use that - * index in your pivots on the data model object. - * - * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. 
You - * would use it in another job by starting your search query with - * - * | tstats ... from sid={sid} | ... - * - * The tsidx index created by this job is deleted when the job is garbage collected - * by Splunk. - * - * It is the user's responsibility to manage this job, including cancelling it. - * - * @param earliestTime A time modifier (e.g., "-2w") setting the earliest time to index. - * @return a Job writing a tsidx index. - */ - public Job createLocalAccelerationJob(String earliestTime) { - String query = "| datamodel " + this.model.getName() + " " + - this.getName() + " search | tscollect"; - JobArgs args = new JobArgs(); - if (earliestTime != null) { - args.setEarliestTime(earliestTime); - } - return this.model.getService().search(query, args); - } - - /** - * Return the calculations done by this data model object to produce fields. - * - * Each calculation has a unique ID assigned to it by splunkd, which is the key - * in the returned map. For most purposes you will probably only want the values. - * - * @return a map of calculation IDs to DataModelCalculation objects. - */ - public Map<String, DataModelCalculation> getCalculations() { - return Collections.unmodifiableMap(this.calculations); - } - - /** - * Fetch a calculation by its unique ID. - * - * @param calculationId a splunkd assigned unique ID for this calculation. - * @return a DataModelCalculation object. - */ - public DataModelCalculation getCalculation(String calculationId) { - return this.calculations.get(calculationId); - } - - /** - * @return a collection of the constraints limiting events that will appear in this data model object. - */ - public Collection<DataModelConstraint> getConstraints() { - return Collections.unmodifiableCollection(this.constraints); - } - - /** - * Fetch the data model on which this object is defined. - * - * @return A DataModel instance containing this object. 
- */ - public DataModel getDataModel() { - return this.model; - } - - /** - * @return the human readable name of this data model object. - */ - public String getDisplayName() { - return this.displayName; - } - - /** - * Fetch a single field of a given name from this data model object. - * - * @param fieldName Name of the field to fetch. - * @return A DataModelField object, or null if there is no field of the given name. - */ - public DataModelField getField(String fieldName) { - if (autoextractedFields.containsKey(fieldName)) { - return autoextractedFields.get(fieldName); - } - for (DataModelCalculation c : this.calculations.values()) { - if (c.containsGeneratedField(fieldName)) { - return c.getGeneratedField(fieldName); - } - } - return null; - } - - /** - * Get a collection of objects specifying all the fields that were automatically extracted - * from events (as opposed to generated by calculations in a data model). - * - * @return a collection of DataModelField objects. - */ - public Collection<DataModelField> getAutoExtractedFields() { - return Collections.unmodifiableCollection(autoextractedFields.values()); - } - - /** - * Return all the fields, whether input or created by calculations. - * @return a collection of DataModelField objects. - */ - public Collection<DataModelField> getFields() { - Collection<DataModelField> fields = new ArrayList<>(); - fields.addAll(this.autoextractedFields.values()); - for (DataModelCalculation c : this.calculations.values()) { - fields.addAll(c.getGeneratedFields()); - } - return fields; - } - - public String getQuery() { - return "| datamodel " + this.getDataModel().getName() + " " + this.getName() + " search"; - } - - /** - * @return Splunk's identifier for this data model object. - */ - public String getName() { return this.name; } - - /** - * Data model objects can inherit from other data model objects - * in the same data model (or from a couple of global base objects - * such as BaseEvent and BaseTransaction). 
The lineage is a list of - * data model object names tracing this inheritance, starting with the - * most remote ancestor and ending with this object. - * - * @return An array of names, starting with this object's name, followed by - * the names up the hierarchy. - */ - public String[] getLineage() { return this.lineage; } - - /** - * Returns the name of the parent of this object. - * - * @return a String giving the name. - */ - public String getParentName() { - return this.parentName; - } - - /** - * @return the data model object this one inherits from if it is a user defined data model object - * in the same data model; otherwise returns null (for example if the data model object inherits from BaseEvent - * or BaseTransaction). - */ - public DataModelObject getParent() { - return this.getDataModel().getObject(this.parentName); - } - - /** - * Create a PivotSpecification on this data model object. - * - * @return a PivotSpecification instance. - */ - public PivotSpecification createPivotSpecification() { - return new PivotSpecification(this); - } - - /** - * Start a job that fetches all the events of this data model object. - * - * @return a Job object. - */ - public Job runQuery() { - return runQuery("", null); - } - - /** - * Start a job that fetches all the events of this data model object. - * - * @param args arguments specifying behavior of the job. - * @return a Job object. - */ - public Job runQuery(JobArgs args) { - return runQuery("", args); - } - - /** - * Start a job that applies querySuffix to all the events in this data model object. - * - * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch - * the contents of this data model object (e.g., "| head 3"). - * @return a Job object. - */ - public Job runQuery(String querySuffix) { - return runQuery(querySuffix, null); - } - - /** - * Start a job that applies querySuffix to all the events in this data model object. 
- * - * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch - * the contents of this data model object (e.g., "| head 3"). - * @param args arguments to control the job. - * @return a Job object. - */ - public Job runQuery(String querySuffix, JobArgs args) { - return getDataModel().getService().search(getQuery() + querySuffix, args); - } - - /** - * Produce a data model object from a JSON dictionary specifying it plus a data model that contains it. - - * @param dataModel a DataModel instance that contains this data model object. - * @param object a JsonElement (as produced by Gson) specifying this data model object (usually one of - * the entries in the array of objects in the JSON description of the data model). - * @return a DataModelObject instance. - */ - static DataModelObject parse(DataModel dataModel, JsonElement object) { - String name = null; - String displayName = null; - String comment = null; - String[] lineage = new String[0]; - String parentName = null; - Map<String, DataModelField> fields = new HashMap<>(); - Collection<String> children = new ArrayList<>(); - Collection<DataModelConstraint> constraints = new ArrayList<>(); - Map<String, DataModelCalculation> calculations = new HashMap<>(); - - // Fields specific to objects inheriting directly from BaseSearch. 
- String baseSearch = null; - // Fields specific to objects inheriting directly from BaseTransaction - String transactionMaxPause = null; - String transactionMaxTimeSpan = null; - Collection<String> groupByFields = new ArrayList<>(); - Collection<String> objectsToGroup = new ArrayList<>(); - - for (Entry<String, JsonElement> entry : object.getAsJsonObject().entrySet()) { - if (entry.getKey().equals("objectName")) { - name = entry.getValue().getAsString(); - } else if (entry.getKey().equals("displayName")) { - displayName = entry.getValue().getAsString(); - } else if (entry.getKey().equals("lineage")) { - lineage = entry.getValue().getAsString().split("\\."); - } else if (entry.getKey().equals("parentName")) { - parentName = entry.getValue().getAsString(); - } else if (entry.getKey().equals("fields")) { - JsonArray fieldsJson = entry.getValue().getAsJsonArray(); - fields.clear(); - - for (JsonElement fieldJson : fieldsJson) { - DataModelField field = DataModelField.parse(fieldJson); - fields.put(field.getName(), field); - } - } else if (entry.getKey().equals("constraints")) { - JsonArray constraintsJson = entry.getValue().getAsJsonArray(); - - for (JsonElement constraintJson : constraintsJson) { - DataModelConstraint constraint = DataModelConstraint.parse(constraintJson); - constraints.add(constraint); - } - } else if (entry.getKey().equals("calculations")) { - calculations.clear(); - for (JsonElement cjson : entry.getValue().getAsJsonArray()) { - DataModelCalculation c = DataModelCalculation.parse(cjson); - String cid = c.getCalculationID(); - calculations.put(cid, c); - } - } else if (entry.getKey().equals("baseSearch")) { - baseSearch = entry.getValue().getAsString(); - } else if (entry.getKey().equals("transactionMaxPause")) { - transactionMaxPause = entry.getValue().getAsString(); - } else if (entry.getKey().equals("transactionMaxTimeSpan")) { - transactionMaxTimeSpan = entry.getValue().getAsString(); - } else if (entry.getKey().equals("groupByFields")) { - for 
(JsonElement e : entry.getValue().getAsJsonArray()) { - groupByFields.add(e.getAsString()); - } - } else if (entry.getKey().equals("objectsToGroup")) { - for (JsonElement e : entry.getValue().getAsJsonArray()) { - objectsToGroup.add(e.getAsString()); - } - } - } - - DataModelObject dmo; - // Create the right subclass of DataModelObject. - if (baseSearch != null) { - dmo = new DataModelSearch(dataModel); - } else if (transactionMaxPause != null) { - dmo = new DataModelTransaction(dataModel); - } else { - dmo = new DataModelObject(dataModel); - } - - // Set the fields common to all data model objects - dmo.name = name; - dmo.displayName = displayName; - dmo.lineage = lineage; - dmo.parentName = parentName; - dmo.autoextractedFields = fields; - dmo.constraints = constraints; - dmo.calculations = calculations; - - // Set the fields of particular types - if (baseSearch != null) { - ((DataModelSearch)dmo).baseSearch = baseSearch; - } else if (transactionMaxPause != null) { - ((DataModelTransaction)dmo).groupByFields = groupByFields; - ((DataModelTransaction)dmo).objectsToGroup = objectsToGroup; - ((DataModelTransaction)dmo).maxPause = transactionMaxPause; - ((DataModelTransaction)dmo).maxSpan = transactionMaxTimeSpan; - } else { - // Has no additional fields - } - - return dmo; - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +package com.splunk; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; + +import java.util.*; +import java.util.Map.Entry; + +/** + * DataModelObject represents one of the structured views in a data model. + */ +public class DataModelObject { + private DataModel model; + private String name; + private String[] lineage; + private String displayName; + private String parentName; + + private Map<String, DataModelField> autoextractedFields; + private Collection<DataModelConstraint> constraints; + private Map<String, DataModelCalculation> calculations; + + protected DataModelObject(DataModel model) { + this.model = model; + } + + /** + * Checks whether there is a field with the given name in this + * data model object. + * + * @param fieldName name of the field to check for. + * @return true if there is such a field; false otherwise. + */ + public boolean containsField(String fieldName) { + if (autoextractedFields.containsKey(fieldName)) { + return true; + } + for (DataModelCalculation c : calculations.values()) { + if (c.containsGeneratedField(fieldName)) { + return true; + } + } + return false; + } + + /** + * Local acceleration is tsidx acceleration of a data model object that is handled + * manually by a user. You create a job which generates an index, and then use that + * index in your pivots on the data model object. + * + * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. You + * would use it in another job by starting your search query with + * + * | tstats ... from sid={sid} | ... + * + * The tsidx index created by this job is deleted when the job is garbage collected + * by Splunk. + * + * It is the user's responsibility to manage this job, including cancelling it. + * + * @return a Job writing a tsidx index. 
+ */ + public Job createLocalAccelerationJob() { + return createLocalAccelerationJob(null); + } + + /** + * Local acceleration is tsidx acceleration of a data model object that is handled + * manually by a user. You create a job which generates an index, and then use that + * index in your pivots on the data model object. + * + * The namespace created by the job is 'sid={sid}' where {sid} is the job's sid. You + * would use it in another job by starting your search query with + * + * | tstats ... from sid={sid} | ... + * + * The tsidx index created by this job is deleted when the job is garbage collected + * by Splunk. + * + * It is the user's responsibility to manage this job, including cancelling it. + * + * @param earliestTime A time modifier (e.g., "-2w") setting the earliest time to index. + * @return a Job writing a tsidx index. + */ + public Job createLocalAccelerationJob(String earliestTime) { + String query = "| datamodel " + this.model.getName() + " " + + this.getName() + " search | tscollect"; + JobArgs args = new JobArgs(); + if (earliestTime != null) { + args.setEarliestTime(earliestTime); + } + return this.model.getService().search(query, args); + } + + /** + * Return the calculations done by this data model object to produce fields. + * + * Each calculation has a unique ID assigned to it by splunkd, which is the key + * in the returned map. For most purposes you will probably only want the values. + * + * @return a map of calculation IDs to DataModelCalculation objects. + */ + public Map<String, DataModelCalculation> getCalculations() { + return Collections.unmodifiableMap(this.calculations); + } + + /** + * Fetch a calculation by its unique ID. + * + * @param calculationId a splunkd assigned unique ID for this calculation. + * @return a DataModelCalculation object. 
+ */ + public DataModelCalculation getCalculation(String calculationId) { + return this.calculations.get(calculationId); + } + + /** + * @return a collection of the constraints limiting events that will appear in this data model object. + */ + public Collection<DataModelConstraint> getConstraints() { + return Collections.unmodifiableCollection(this.constraints); + } + + /** + * Fetch the data model on which this object is defined. + * + * @return A DataModel instance containing this object. + */ + public DataModel getDataModel() { + return this.model; + } + + /** + * @return the human readable name of this data model object. + */ + public String getDisplayName() { + return this.displayName; + } + + /** + * Fetch a single field of a given name from this data model object. + * + * @param fieldName Name of the field to fetch. + * @return A DataModelField object, or null if there is no field of the given name. + */ + public DataModelField getField(String fieldName) { + if (autoextractedFields.containsKey(fieldName)) { + return autoextractedFields.get(fieldName); + } + for (DataModelCalculation c : this.calculations.values()) { + if (c.containsGeneratedField(fieldName)) { + return c.getGeneratedField(fieldName); + } + } + return null; + } + + /** + * Get a collection of objects specifying all the fields that were automatically extracted + * from events (as opposed to generated by calculations in a data model). + * + * @return a collection of DataModelField objects. + */ + public Collection<DataModelField> getAutoExtractedFields() { + return Collections.unmodifiableCollection(autoextractedFields.values()); + } + + /** + * Return all the fields, whether input or created by calculations. + * @return a collection of DataModelField objects. 
+ */ + public Collection<DataModelField> getFields() { + Collection<DataModelField> fields = new ArrayList<>(); + fields.addAll(this.autoextractedFields.values()); + for (DataModelCalculation c : this.calculations.values()) { + fields.addAll(c.getGeneratedFields()); + } + return fields; + } + + public String getQuery() { + return "| datamodel " + this.getDataModel().getName() + " " + this.getName() + " search"; + } + + /** + * @return Splunk's identifier for this data model object. + */ + public String getName() { return this.name; } + + /** + * Data model objects can inherit from other data model objects + * in the same data model (or from a couple of global base objects + * such as BaseEvent and BaseTransaction). The lineage is a list of + * data model object names tracing this inheritance, starting with the + * most remote ancestor and ending with this object. + * + * @return An array of names, starting with this object's name, followed by + * the names up the hierarchy. + */ + public String[] getLineage() { return this.lineage; } + + /** + * Returns the name of the parent of this object. + * + * @return a String giving the name. + */ + public String getParentName() { + return this.parentName; + } + + /** + * @return the data model object this one inherits from if it is a user defined data model object + * in the same data model; otherwise returns null (for example if the data model object inherits from BaseEvent + * or BaseTransaction). + */ + public DataModelObject getParent() { + return this.getDataModel().getObject(this.parentName); + } + + /** + * Create a PivotSpecification on this data model object. + * + * @return a PivotSpecification instance. + */ + public PivotSpecification createPivotSpecification() { + return new PivotSpecification(this); + } + + /** + * Start a job that fetches all the events of this data model object. + * + * @return a Job object. 
+ */ + public Job runQuery() { + return runQuery("", null); + } + + /** + * Start a job that fetches all the events of this data model object. + * + * @param args arguments specifying behavior of the job. + * @return a Job object. + */ + public Job runQuery(JobArgs args) { + return runQuery("", args); + } + + /** + * Start a job that applies querySuffix to all the events in this data model object. + * + * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch + * the contents of this data model object (e.g., "| head 3"). + * @return a Job object. + */ + public Job runQuery(String querySuffix) { + return runQuery(querySuffix, null); + } + + /** + * Start a job that applies querySuffix to all the events in this data model object. + * + * @param querySuffix a search query, starting with a '|' that will be appended to the command to fetch + * the contents of this data model object (e.g., "| head 3"). + * @param args arguments to control the job. + * @return a Job object. + */ + public Job runQuery(String querySuffix, JobArgs args) { + return getDataModel().getService().search(getQuery() + querySuffix, args); + } + + /** + * Produce a data model object from a JSON dictionary specifying it plus a data model that contains it. + + * @param dataModel a DataModel instance that contains this data model object. + * @param object a JsonElement (as produced by Gson) specifying this data model object (usually one of + * the entries in the array of objects in the JSON description of the data model). + * @return a DataModelObject instance. 
+ */ + static DataModelObject parse(DataModel dataModel, JsonElement object) { + String name = null; + String displayName = null; + String comment = null; + String[] lineage = new String[0]; + String parentName = null; + Map<String, DataModelField> fields = new HashMap<>(); + Collection<String> children = new ArrayList<>(); + Collection<DataModelConstraint> constraints = new ArrayList<>(); + Map<String, DataModelCalculation> calculations = new HashMap<>(); + + // Fields specific to objects inheriting directly from BaseSearch. + String baseSearch = null; + // Fields specific to objects inheriting directly from BaseTransaction + String transactionMaxPause = null; + String transactionMaxTimeSpan = null; + Collection<String> groupByFields = new ArrayList<>(); + Collection<String> objectsToGroup = new ArrayList<>(); + + for (Entry<String, JsonElement> entry : object.getAsJsonObject().entrySet()) { + if (entry.getKey().equals("objectName")) { + name = entry.getValue().getAsString(); + } else if (entry.getKey().equals("displayName")) { + displayName = entry.getValue().getAsString(); + } else if (entry.getKey().equals("lineage")) { + lineage = entry.getValue().getAsString().split("\\."); + } else if (entry.getKey().equals("parentName")) { + parentName = entry.getValue().getAsString(); + } else if (entry.getKey().equals("fields")) { + JsonArray fieldsJson = entry.getValue().getAsJsonArray(); + fields.clear(); + + for (JsonElement fieldJson : fieldsJson) { + DataModelField field = DataModelField.parse(fieldJson); + fields.put(field.getName(), field); + } + } else if (entry.getKey().equals("constraints")) { + JsonArray constraintsJson = entry.getValue().getAsJsonArray(); + + for (JsonElement constraintJson : constraintsJson) { + DataModelConstraint constraint = DataModelConstraint.parse(constraintJson); + constraints.add(constraint); + } + } else if (entry.getKey().equals("calculations")) { + calculations.clear(); + for (JsonElement cjson : entry.getValue().getAsJsonArray()) 
{ + DataModelCalculation c = DataModelCalculation.parse(cjson); + String cid = c.getCalculationID(); + calculations.put(cid, c); + } + } else if (entry.getKey().equals("baseSearch")) { + baseSearch = entry.getValue().getAsString(); + } else if (entry.getKey().equals("transactionMaxPause")) { + transactionMaxPause = entry.getValue().getAsString(); + } else if (entry.getKey().equals("transactionMaxTimeSpan")) { + transactionMaxTimeSpan = entry.getValue().getAsString(); + } else if (entry.getKey().equals("groupByFields")) { + for (JsonElement e : entry.getValue().getAsJsonArray()) { + groupByFields.add(e.getAsString()); + } + } else if (entry.getKey().equals("objectsToGroup")) { + for (JsonElement e : entry.getValue().getAsJsonArray()) { + objectsToGroup.add(e.getAsString()); + } + } + } + + DataModelObject dmo; + // Create the right subclass of DataModelObject. + if (baseSearch != null) { + dmo = new DataModelSearch(dataModel); + } else if (transactionMaxPause != null) { + dmo = new DataModelTransaction(dataModel); + } else { + dmo = new DataModelObject(dataModel); + } + + // Set the fields common to all data model objects + dmo.name = name; + dmo.displayName = displayName; + dmo.lineage = lineage; + dmo.parentName = parentName; + dmo.autoextractedFields = fields; + dmo.constraints = constraints; + dmo.calculations = calculations; + + // Set the fields of particular types + if (baseSearch != null) { + ((DataModelSearch)dmo).baseSearch = baseSearch; + } else if (transactionMaxPause != null) { + ((DataModelTransaction)dmo).groupByFields = groupByFields; + ((DataModelTransaction)dmo).objectsToGroup = objectsToGroup; + ((DataModelTransaction)dmo).maxPause = transactionMaxPause; + ((DataModelTransaction)dmo).maxSpan = transactionMaxTimeSpan; + } else { + // Has no additional fields + } + + return dmo; + } +} diff --git a/splunk/src/main/java/com/splunk/Entity.java b/splunk/src/main/java/com/splunk/Entity.java index 12cbbe6f..c2275736 100644 --- 
a/splunk/src/main/java/com/splunk/Entity.java +++ b/splunk/src/main/java/com/splunk/Entity.java @@ -1,493 +1,493 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.*; - -/** - * The {@code Entity} class represents a Splunk entity. - */ -public class Entity extends Resource implements Map<String, Object> { - protected Record content; - protected HashMap<String, Object> toUpdate = new LinkedHashMap<>(); - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The entity's endpoint. - */ - public Entity(Service service, String path) { - super(service, path); - } - - /** - * Returns the path that corresponds to the requested action. - * - * @param action The requested action. - * @return The return path. 
- */ - protected String actionPath(String action) { - if (action.equals("disable")) - return path + "/disable"; - if (action.equals("edit")) - return path; - if (action.equals("enable")) - return path + "/enable"; - if (action.equals("remove")) - return path; - if (action.equals("acl")) - return path + "/acl"; - throw new IllegalArgumentException("Invalid action: " + action); - } - - /** {@inheritDoc} */ - public void clear() { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public boolean containsKey(Object key) { - return getContent().containsKey(key); - } - - /** {@inheritDoc} */ - public boolean containsValue(Object value) { - return getContent().containsValue(value); - } - - /** - * Disables the entity that is named by this endpoint. This method is - * available on almost every endpoint. - */ - public void disable() { - service.post(actionPath("disable")); - invalidate(); - } - - /** - * Enables the entity that is named by this endpoint. This method is - * available on almost every endpoint. - */ - public void enable() { - service.post(actionPath("enable")); - invalidate(); - } - - /** {@inheritDoc} */ - public Set<Map.Entry<String, Object>> entrySet() { - return getContent().entrySet(); - } - - /** {@inheritDoc} */ - public Object get(Object key) { - if (toUpdate.containsKey(key)) return toUpdate.get(key); - return getContent().get(key); - } - - /** - * Returns the Boolean value associated with the specified key. Values - * can be converted from: 0, 1, true, false. - * - * @param key The key to look up. - * @return The Boolean value associated with the specified key. - */ - boolean getBoolean(String key) { - if (toUpdate.containsKey(key)) - return Value.toBoolean(toUpdate.get(key).toString()); - return getContent().getBoolean(key); - } - - /** - * Returns the Boolean value associated with the specified key, or the - * default value if the key does not exist. Boolean values can be converted - * from: 0, 1, true, false. 
- * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The Boolean value associated with the specified key. - */ - boolean getBoolean(String key, boolean defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toBoolean(toUpdate.get(key).toString()); - return getContent().getBoolean(key, defaultValue); - } - - /** - * Returns the long value associated with the specified key. Long values - * can be converted from: number, numberMB, numberGB. - * - * @param key The key to look up. - * @return The long value associated with the specified key. - */ - long getByteCount(String key) { - if (toUpdate.containsKey(key)) - return Value.toByteCount(toUpdate.get(key).toString()); - return getContent().getByteCount(key); - } - - /** - * Returns the long value associated with the specified key, or the default - * value if the key does not exist. Long values can be converted from: - * number, numberMB, numberGB. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The long value associated with the specified key. - */ - long getByteCount(String key, long defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toByteCount(toUpdate.get(key).toString()); - return getContent().getByteCount(key, defaultValue); - } - - protected Record getContent() { - return validate().content; - } - - /** - * Returns a date value associated with the specified key. Date values can - * be converted from standard UTC time formats. - * - * @param key The key to look up. - * @return The date value associated with the specified key. - */ - Date getDate(String key) { - if (toUpdate.containsKey(key)) - return Value.toDate(toUpdate.get(key).toString()); - if (getContent().containsKey(key)) { - return getContent().getDate(key); - } else { - return null; - } - } - - /** - * Returns a date value associated with the specified key, or the default - * value if the key does not exist. 
Date values can be converted from - * standard UTC time formats. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The date value associated with the specified key. - */ - Date getDate(String key, Date defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toDate(toUpdate.get(key).toString()); - return getContent().getDate(key, defaultValue); - } - - /** - * Returns the floating point value associated with the specified key. - * - * @param key The key to look up. - * @return The floating point value associated with the specified key. - */ - float getFloat(String key) { - if (toUpdate.containsKey(key)) - return Value.toFloat(toUpdate.get(key).toString()); - return getContent().getFloat(key); - } - - /** - * Returns the integer point value associated with the specified key. - * - * @param key The key to look up. - * @return The integer point value associated with the specified key. - */ - int getInteger(String key) { - if (toUpdate.containsKey(key)) - return Value.toInteger(toUpdate.get(key).toString()); - return getContent().getInteger(key); - } - - /** - * Returns the integer value associated with the specified key. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The integer value associated with the specified key. - */ - int getInteger(String key, int defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toInteger(toUpdate.get(key).toString()); - return getContent().getInteger(key, defaultValue); - } - - /** - * Returns the long value associated with the specified key. - * - * @param key The key to look up. - * @return The long value associated with the specified key. - */ - long getLong(String key) { - if (toUpdate.containsKey(key)) - return Value.toLong(toUpdate.get(key).toString()); - return getContent().getLong(key); - } - - /** - * Returns the long value associated with the specified key. - * - * @param key The key to look up. 
- * @param defaultValue The default value. - * @return The long value associated with the specified key. - */ - long getLong(String key, int defaultValue) { - if (toUpdate.containsKey(key)) - return Value.toLong(toUpdate.get(key).toString()); - return getContent().getLong(key, defaultValue); - } - - /** - * Returns the metadata (eai:acl) of this entity. This data includes - * permissions for accessing the resource, and values that indicate - * which resource fields are wildcards, required, and optional. - * - * @return The metadata of this entity, or {@code null} if none exist. - */ - public EntityMetadata getMetadata() { - // CONSIDER: For entities that don't have an eai:acl field, which is - // uncommon but does happen at least in the case of a DeploymentClient - // that is not enabled, we return null. A slightly friendlier option - // would be to return a metadata instance that defaults all values? - if (!containsKey("eai:acl")) return null; - return new EntityMetadata(this); - } - - /** - * Returns the string value associated with the specified key. - * - * @param key The key to look up. - * @return The string value associated with the specified key. - */ - String getString(String key) { - if (toUpdate.containsKey(key)) - return toUpdate.get(key).toString(); - return getContent().getString(key); - } - - /** - * Returns the string value associated with the specified key, or the - * default value if the key does not exist. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The string value associated with the specified key. - */ - String getString(String key, String defaultValue) { - if (toUpdate.containsKey(key)) - return toUpdate.get(key).toString(); - return getContent().getString(key, defaultValue); - } - - /** - * Returns the string array value associated with the specified key. - * - * @param key The key to look up. - * @return The string array value associated with the specified key. 
- */ - String[] getStringArray(String key) { - if (toUpdate.containsKey(key)) { - return ((String)toUpdate.get(key)).split("\\|"); - } - return getContent().getStringArray(key); - } - - /** - * Returns the string array value associated with the specified key, or the - * default value if the key does not exist. - * - * @param key The key to look up. - * @param defaultValue The default value. - * @return The string array value associated with the specified key. - */ - String[] getStringArray(String key, String[] defaultValue) { - if (toUpdate.containsKey(key)) - return getStringArray(key); - return getContent().getStringArray(key, defaultValue); - } - - /** {@inheritDoc} */ - public boolean isEmpty() { - return getContent().isEmpty(); - } - - /** - * Indicates whether this entity is disabled. This method is - * available on almost every endpoint. - * - * @return {@code true} if this entity is disabled, {@code false} if - * enabled. - */ - public boolean isDisabled() { - return getBoolean("disabled", false); - } - - /** - * Returns whether this entity's name can be changed via {@link #update}. - * - * Most entity names cannot be changed in this way. - * @return false. - */ - protected boolean isNameChangeAllowed() { - return false; - } - - /** {@inheritDoc} */ - public Set<String> keySet() { - return getContent().keySet(); - } - - @Override - Entity load(AtomObject value) { - super.load(value); - AtomEntry entry = (AtomEntry)value; - if (entry == null) { - content = new Record(); - } - else { - content = entry.content; - } - return this; - } - - /** {@inheritDoc} */ - public Object put(String key, Object value) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public void putAll(Map<? extends String, ? extends Object> map) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - @Override public Entity refresh() { - // Update any attribute values set by a setter method that has not - // yet been written to the object. 
- ResponseMessage response = service.get(path); - assert(response.getStatus() == 200); - AtomFeed feed; - try { - feed = AtomFeed.parseStream(response.getContent()); - } catch (Exception e) { - throw new RuntimeException(e); - } - int count = feed.entries.size(); - if (count > 1) { - throw new IllegalStateException("Expected 0 or 1 Atom entries; found " + feed.entries.size()); - } - AtomEntry entry = count == 0 ? null : feed.entries.get(0); - load(entry); - return this; - } - - /** {@inheritDoc} */ - public Object remove(Object key) { - throw new UnsupportedOperationException(); - } - - /** - * Sets the local cache update value. Writing is deferred until - * {@code update} has been called. - * - * @param key The key to set. - * @param value The default value. - */ - void setCacheValue(String key, Object value) { - toUpdate.put(key, value); - } - - /** {@inheritDoc} */ - public int size() { - return getContent().size(); - } - - /** - * Updates the entity with the values you previously set using the setter - * methods, and any additional specified arguments. The specified arguments - * take precedent over the values that were set using the setter methods. - * - * @param args The arguments to update. - */ - public void update(Map<String, Object> args) { - if (!toUpdate.isEmpty() || !args.isEmpty()) { - // Merge cached setters and live args together before updating. - Map<String, Object> mergedArgs = - new LinkedHashMap<>(); - mergedArgs.putAll(toUpdate); - mergedArgs.putAll(args); - - if (mergedArgs.containsKey("name") && !isNameChangeAllowed()) { - throw new IllegalStateException("Cannot set 'name' on an existing entity."); - } - - service.post(actionPath("edit"), mergedArgs); - toUpdate.clear(); - invalidate(); - } - } - - /** - * Updates the entity with the accumulated arguments, established by the - * individual setter methods for each specific entity class. 
- */ - @SuppressWarnings("unchecked") - public void update() { - update(Collections.EMPTY_MAP); - } - - - /** - * Update the access control list (ACL) properties for this entity, - * - * @param args: Properties to update for this entity. - * Required Properties in 'args' - * - `owner`: The Splunk username, such as "admin". A value of "nobody" means no specific user. - * - `sharing`: A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system". - */ - public void aclUpdate(Map<String, Object> args){ - if(!args.containsKey("sharing")){ - throw new IllegalArgumentException("Required argument 'sharing' is missing."); - } - if(!args.containsKey("owner")){ - throw new IllegalArgumentException("Required argument 'owner' is missing."); - } - service.post(actionPath("acl"), args); - invalidate(); - } - - /** - * Removes this entity from its corresponding collection. - */ - public void remove() { - service.delete(actionPath("remove")); - } - - /** {@inheritDoc} */ - @Override public Entity validate() { - super.validate(); - return this; - } - - /** {@inheritDoc} */ - public Collection<Object> values() { - return getContent().values(); - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.*; + +/** + * The {@code Entity} class represents a Splunk entity. 
+ */ +public class Entity extends Resource implements Map<String, Object> { + protected Record content; + protected HashMap<String, Object> toUpdate = new LinkedHashMap<>(); + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The entity's endpoint. + */ + public Entity(Service service, String path) { + super(service, path); + } + + /** + * Returns the path that corresponds to the requested action. + * + * @param action The requested action. + * @return The return path. + */ + protected String actionPath(String action) { + if (action.equals("disable")) + return path + "/disable"; + if (action.equals("edit")) + return path; + if (action.equals("enable")) + return path + "/enable"; + if (action.equals("remove")) + return path; + if (action.equals("acl")) + return path + "/acl"; + throw new IllegalArgumentException("Invalid action: " + action); + } + + /** {@inheritDoc} */ + public void clear() { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public boolean containsKey(Object key) { + return getContent().containsKey(key); + } + + /** {@inheritDoc} */ + public boolean containsValue(Object value) { + return getContent().containsValue(value); + } + + /** + * Disables the entity that is named by this endpoint. This method is + * available on almost every endpoint. + */ + public void disable() { + service.post(actionPath("disable")); + invalidate(); + } + + /** + * Enables the entity that is named by this endpoint. This method is + * available on almost every endpoint. 
+ */ + public void enable() { + service.post(actionPath("enable")); + invalidate(); + } + + /** {@inheritDoc} */ + public Set<Map.Entry<String, Object>> entrySet() { + return getContent().entrySet(); + } + + /** {@inheritDoc} */ + public Object get(Object key) { + if (toUpdate.containsKey(key)) return toUpdate.get(key); + return getContent().get(key); + } + + /** + * Returns the Boolean value associated with the specified key. Values + * can be converted from: 0, 1, true, false. + * + * @param key The key to look up. + * @return The Boolean value associated with the specified key. + */ + boolean getBoolean(String key) { + if (toUpdate.containsKey(key)) + return Value.toBoolean(toUpdate.get(key).toString()); + return getContent().getBoolean(key); + } + + /** + * Returns the Boolean value associated with the specified key, or the + * default value if the key does not exist. Boolean values can be converted + * from: 0, 1, true, false. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The Boolean value associated with the specified key. + */ + boolean getBoolean(String key, boolean defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toBoolean(toUpdate.get(key).toString()); + return getContent().getBoolean(key, defaultValue); + } + + /** + * Returns the long value associated with the specified key. Long values + * can be converted from: number, numberMB, numberGB. + * + * @param key The key to look up. + * @return The long value associated with the specified key. + */ + long getByteCount(String key) { + if (toUpdate.containsKey(key)) + return Value.toByteCount(toUpdate.get(key).toString()); + return getContent().getByteCount(key); + } + + /** + * Returns the long value associated with the specified key, or the default + * value if the key does not exist. Long values can be converted from: + * number, numberMB, numberGB. + * + * @param key The key to look up. + * @param defaultValue The default value. 
+ * @return The long value associated with the specified key. + */ + long getByteCount(String key, long defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toByteCount(toUpdate.get(key).toString()); + return getContent().getByteCount(key, defaultValue); + } + + protected Record getContent() { + return validate().content; + } + + /** + * Returns a date value associated with the specified key. Date values can + * be converted from standard UTC time formats. + * + * @param key The key to look up. + * @return The date value associated with the specified key. + */ + Date getDate(String key) { + if (toUpdate.containsKey(key)) + return Value.toDate(toUpdate.get(key).toString()); + if (getContent().containsKey(key)) { + return getContent().getDate(key); + } else { + return null; + } + } + + /** + * Returns a date value associated with the specified key, or the default + * value if the key does not exist. Date values can be converted from + * standard UTC time formats. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The date value associated with the specified key. + */ + Date getDate(String key, Date defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toDate(toUpdate.get(key).toString()); + return getContent().getDate(key, defaultValue); + } + + /** + * Returns the floating point value associated with the specified key. + * + * @param key The key to look up. + * @return The floating point value associated with the specified key. + */ + float getFloat(String key) { + if (toUpdate.containsKey(key)) + return Value.toFloat(toUpdate.get(key).toString()); + return getContent().getFloat(key); + } + + /** + * Returns the integer point value associated with the specified key. + * + * @param key The key to look up. + * @return The integer point value associated with the specified key. 
+ */ + int getInteger(String key) { + if (toUpdate.containsKey(key)) + return Value.toInteger(toUpdate.get(key).toString()); + return getContent().getInteger(key); + } + + /** + * Returns the integer value associated with the specified key. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The integer value associated with the specified key. + */ + int getInteger(String key, int defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toInteger(toUpdate.get(key).toString()); + return getContent().getInteger(key, defaultValue); + } + + /** + * Returns the long value associated with the specified key. + * + * @param key The key to look up. + * @return The long value associated with the specified key. + */ + long getLong(String key) { + if (toUpdate.containsKey(key)) + return Value.toLong(toUpdate.get(key).toString()); + return getContent().getLong(key); + } + + /** + * Returns the long value associated with the specified key. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The long value associated with the specified key. + */ + long getLong(String key, int defaultValue) { + if (toUpdate.containsKey(key)) + return Value.toLong(toUpdate.get(key).toString()); + return getContent().getLong(key, defaultValue); + } + + /** + * Returns the metadata (eai:acl) of this entity. This data includes + * permissions for accessing the resource, and values that indicate + * which resource fields are wildcards, required, and optional. + * + * @return The metadata of this entity, or {@code null} if none exist. + */ + public EntityMetadata getMetadata() { + // CONSIDER: For entities that don't have an eai:acl field, which is + // uncommon but does happen at least in the case of a DeploymentClient + // that is not enabled, we return null. A slightly friendlier option + // would be to return a metadata instance that defaults all values? 
+ if (!containsKey("eai:acl")) return null; + return new EntityMetadata(this); + } + + /** + * Returns the string value associated with the specified key. + * + * @param key The key to look up. + * @return The string value associated with the specified key. + */ + String getString(String key) { + if (toUpdate.containsKey(key)) + return toUpdate.get(key).toString(); + return getContent().getString(key); + } + + /** + * Returns the string value associated with the specified key, or the + * default value if the key does not exist. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The string value associated with the specified key. + */ + String getString(String key, String defaultValue) { + if (toUpdate.containsKey(key)) + return toUpdate.get(key).toString(); + return getContent().getString(key, defaultValue); + } + + /** + * Returns the string array value associated with the specified key. + * + * @param key The key to look up. + * @return The string array value associated with the specified key. + */ + String[] getStringArray(String key) { + if (toUpdate.containsKey(key)) { + return ((String)toUpdate.get(key)).split("\\|"); + } + return getContent().getStringArray(key); + } + + /** + * Returns the string array value associated with the specified key, or the + * default value if the key does not exist. + * + * @param key The key to look up. + * @param defaultValue The default value. + * @return The string array value associated with the specified key. + */ + String[] getStringArray(String key, String[] defaultValue) { + if (toUpdate.containsKey(key)) + return getStringArray(key); + return getContent().getStringArray(key, defaultValue); + } + + /** {@inheritDoc} */ + public boolean isEmpty() { + return getContent().isEmpty(); + } + + /** + * Indicates whether this entity is disabled. This method is + * available on almost every endpoint. + * + * @return {@code true} if this entity is disabled, {@code false} if + * enabled. 
+ */ + public boolean isDisabled() { + return getBoolean("disabled", false); + } + + /** + * Returns whether this entity's name can be changed via {@link #update}. + * + * Most entity names cannot be changed in this way. + * @return false. + */ + protected boolean isNameChangeAllowed() { + return false; + } + + /** {@inheritDoc} */ + public Set<String> keySet() { + return getContent().keySet(); + } + + @Override + Entity load(AtomObject value) { + super.load(value); + AtomEntry entry = (AtomEntry)value; + if (entry == null) { + content = new Record(); + } + else { + content = entry.content; + } + return this; + } + + /** {@inheritDoc} */ + public Object put(String key, Object value) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public void putAll(Map<? extends String, ? extends Object> map) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + @Override public Entity refresh() { + // Update any attribute values set by a setter method that has not + // yet been written to the object. + ResponseMessage response = service.get(path); + assert(response.getStatus() == 200); + AtomFeed feed; + try { + feed = AtomFeed.parseStream(response.getContent()); + } catch (Exception e) { + throw new RuntimeException(e); + } + int count = feed.entries.size(); + if (count > 1) { + throw new IllegalStateException("Expected 0 or 1 Atom entries; found " + feed.entries.size()); + } + AtomEntry entry = count == 0 ? null : feed.entries.get(0); + load(entry); + return this; + } + + /** {@inheritDoc} */ + public Object remove(Object key) { + throw new UnsupportedOperationException(); + } + + /** + * Sets the local cache update value. Writing is deferred until + * {@code update} has been called. + * + * @param key The key to set. + * @param value The default value. 
+ */ + void setCacheValue(String key, Object value) { + toUpdate.put(key, value); + } + + /** {@inheritDoc} */ + public int size() { + return getContent().size(); + } + + /** + * Updates the entity with the values you previously set using the setter + * methods, and any additional specified arguments. The specified arguments + * take precedent over the values that were set using the setter methods. + * + * @param args The arguments to update. + */ + public void update(Map<String, Object> args) { + if (!toUpdate.isEmpty() || !args.isEmpty()) { + // Merge cached setters and live args together before updating. + Map<String, Object> mergedArgs = + new LinkedHashMap<>(); + mergedArgs.putAll(toUpdate); + mergedArgs.putAll(args); + + if (mergedArgs.containsKey("name") && !isNameChangeAllowed()) { + throw new IllegalStateException("Cannot set 'name' on an existing entity."); + } + + service.post(actionPath("edit"), mergedArgs); + toUpdate.clear(); + invalidate(); + } + } + + /** + * Updates the entity with the accumulated arguments, established by the + * individual setter methods for each specific entity class. + */ + @SuppressWarnings("unchecked") + public void update() { + update(Collections.EMPTY_MAP); + } + + + /** + * Update the access control list (ACL) properties for this entity, + * + * @param args: Properties to update for this entity. + * Required Properties in 'args' + * - `owner`: The Splunk username, such as "admin". A value of "nobody" means no specific user. + * - `sharing`: A mode that indicates how the resource is shared. The sharing mode can be "user", "app", "global", or "system". 
+ */ + public void aclUpdate(Map<String, Object> args){ + if(!args.containsKey("sharing")){ + throw new IllegalArgumentException("Required argument 'sharing' is missing."); + } + if(!args.containsKey("owner")){ + throw new IllegalArgumentException("Required argument 'owner' is missing."); + } + service.post(actionPath("acl"), args); + invalidate(); + } + + /** + * Removes this entity from its corresponding collection. + */ + public void remove() { + service.delete(actionPath("remove")); + } + + /** {@inheritDoc} */ + @Override public Entity validate() { + super.validate(); + return this; + } + + /** {@inheritDoc} */ + public Collection<Object> values() { + return getContent().values(); + } +} + diff --git a/splunk/src/main/java/com/splunk/EntityCollection.java b/splunk/src/main/java/com/splunk/EntityCollection.java index a0378d60..588bd413 100644 --- a/splunk/src/main/java/com/splunk/EntityCollection.java +++ b/splunk/src/main/java/com/splunk/EntityCollection.java @@ -138,7 +138,7 @@ public T remove(String key, Args namespace) { if (!containsKey(key)) return null; LinkedList<T> entities = items.get(key); String pathMatcher = service.fullpath("", namespace); - if (entities == null || entities.size() == 0) return null; + if (entities == null || entities.isEmpty()) return null; for (T entity: entities) { if (entity.path.startsWith(pathMatcher)) { entity.remove(); diff --git a/splunk/src/main/java/com/splunk/Event.java b/splunk/src/main/java/com/splunk/Event.java index 4c31e535..42dcb377 100644 --- a/splunk/src/main/java/com/splunk/Event.java +++ b/splunk/src/main/java/com/splunk/Event.java @@ -1,214 +1,214 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Pattern; - -/** - * The {@code Event} class wraps an individual event or result that was returned - * by the {@link ResultsReader#getNextEvent} method. - * <p> - * An event maps each field name to a list of zero of more values. - * These values can be accessed as either an array (using the {@link #getArray} - * method) or as a delimited string (using the {@link #get} method). We - * recommend accessing values as an array when possible. - * <p> - * The delimiter for field values depends on the underlying result format. - * If the underlying format does not specify a delimiter, such as with the - * {@link ResultsReaderXml} class, the delimiter is a comma (,). - */ -public class Event extends HashMap<String, String> { - private Map<String, String[]> arrayValues = new HashMap<>(); - private String segmentedRaw; - - // Prevent non-SDK instantiation. - Event() { - // nothing - } - - /** - * Sets the single value or delimited set of values for the specified - * field name. - * - * When setting a multi-valued field, use the - * {@link #putArray(String, String[])} method instead. - * - * @param key The field name. - * @param valueOrDelimitedValues The single values or delimited set of - * values. - */ - String putSingleOrDelimited(String key, String valueOrDelimitedValues) { - return super.put(key, valueOrDelimitedValues); - } - - /** - * Sets the values for the specified field name, with the assumption that - * the value delimiter is a comma (,). 
- * - * @param key The field name. - * @param values The delimited set of values. - */ - void putArray(String key, String[] values) { - arrayValues.put(key, values); - - // For backward compatibility with the Map interface - super.put(key, Util.join(",", values)); - } - - /** - * Sets the value for the XML element for the {@code _raw} field. This value - * is only used by the {@link ResultsReaderXml} class. - * @param value The text of the XML element. - */ - void putSegmentedRaw(String value) { - segmentedRaw = value; - } - - /** - * Returns the single value or delimited set of values for the specified - * field name, or {@code null} if the specified field is not present. - * - * When getting a multi-valued field, use the {@link #getArray(String)} or - * {@link #getArray(String, String)} method instead. - * - * @param key The field name. - * @return The single value or delimited set of values. - */ - public String get(String key) { - return super.get(key); - } - - /** - * Gets the values for the specified field name. - * <br><br> - * <b>Caution:</b> This variant of {@link #getArray(String, String)} is - * unsafe for {@link ResultsReader} implementations that require a - * delimiter. Therefore, this method should only be used for results that - * are returned by {@link ResultsReaderXml}. For other readers, use the - * {@link #getArray(String, String)} method instead. - * <br><br> - * If the underlying {@link ResultsReader} object has no delimiter, the - * original array of values is returned. If the object <i>does</i> have a - * delimiter, the single/delimited value is assumed to be a single value and - * is returned as a single-valued array. - * - * @param key The field name. - * @return The original array of values if there is no delimiter, or the - * single-valued array. 
- */ - public String[] getArray(String key) { - String[] arrayValue = arrayValues.get(key); - if (arrayValue != null) { - return arrayValue; - } - - String singleValue = super.get(key); - if (singleValue == null) { - return null; - } - return new String[] { singleValue }; - } - - /** - * Gets the values for the specified field name. - * - * The delimiter must be determined empirically based on the search - * string and the data format of the index. The delimiter can differ - * between fields in the same {@link Event} object. - * - * The delimiter is ignored for {@link ResultsReader} implementations - * that do not require a delimiter, such as {@link ResultsReaderXml}. - * - * If the underlying {@link ResultsReader} object has no delimiter, the - * original array of values is returned (and the specified delimiter is - * ignored). If the object <i>does</i> have a delimiter, the - * single/delimited value is split based on the specified delimiter and is - * returned as an array. - * - * @param key The field name. - * @param delimiter The delimiter. - * @return The original array of values if there is no delimiter, or the - * array of values split by delimiter. - */ - public String[] getArray(String key, String delimiter) { - String[] arrayValue = arrayValues.get(key); - if (arrayValue != null) { - return arrayValue; - } - - String delimitedValues = super.get(key); - if (delimitedValues == null) { - return null; - } - return delimitedValues.split(Pattern.quote(delimiter)); - } - - /** - * Gets the XML markup for the {@code "_raw"} field value. This value - * is only used by the {@link ResultsReaderXml} class. - * <p> - * The return value is different than that of {@code get("_raw")} - * in that this segmented raw value is an XML fragment that includes all - * markup such as XML tags and escaped characters. 
- * <p> - * For example, {@code get("_raw")} returns this: - * <p> - * {@code "http://localhost:8000/en-US/app/search/flashtimeline?q=search%20search%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"} - * <p> - * The {@code getSegmentedRaw} method returns this: - * <p> - * {@code <v xml:space="preserve" trunc="0">"http://localhost:8000/en-US/app/<sg h=\"1\">search</sg>/flashtimeline?q=<sg h=\"1\">search</sg>%20<sg h=\"1\">search</sg>%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"</v>} - * @return the segmented raw xml including tags and escaped characters. - */ - public String getSegmentedRaw() { - if (segmentedRaw == null) { - // ResultsReaderXml will always set this to not null. Using this - // method for other result reader is not supported. - throw new UnsupportedOperationException( - "The value is not available. Use ResultsReaderXml instead."); - } - return segmentedRaw; - } - // === Read Only === - - @Override - public void clear() { - throw new UnsupportedOperationException(); - } - - @Override - public Object clone() { - throw new UnsupportedOperationException(); - } - - @Override - public String put(String key, String value) { - throw new UnsupportedOperationException(); - } - - @Override - public void putAll(Map<? extends String, ? extends String> m) { - throw new UnsupportedOperationException(); - } - - @Override - public String remove(Object key) { - throw new UnsupportedOperationException(); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Pattern; + +/** + * The {@code Event} class wraps an individual event or result that was returned + * by the {@link ResultsReader#getNextEvent} method. + * <p> + * An event maps each field name to a list of zero of more values. + * These values can be accessed as either an array (using the {@link #getArray} + * method) or as a delimited string (using the {@link #get} method). We + * recommend accessing values as an array when possible. + * <p> + * The delimiter for field values depends on the underlying result format. + * If the underlying format does not specify a delimiter, such as with the + * {@link ResultsReaderXml} class, the delimiter is a comma (,). + */ +public class Event extends HashMap<String, String> { + private Map<String, String[]> arrayValues = new HashMap<>(); + private String segmentedRaw; + + // Prevent non-SDK instantiation. + Event() { + // nothing + } + + /** + * Sets the single value or delimited set of values for the specified + * field name. + * + * When setting a multi-valued field, use the + * {@link #putArray(String, String[])} method instead. + * + * @param key The field name. + * @param valueOrDelimitedValues The single values or delimited set of + * values. + */ + String putSingleOrDelimited(String key, String valueOrDelimitedValues) { + return super.put(key, valueOrDelimitedValues); + } + + /** + * Sets the values for the specified field name, with the assumption that + * the value delimiter is a comma (,). + * + * @param key The field name. + * @param values The delimited set of values. 
+ */ + void putArray(String key, String[] values) { + arrayValues.put(key, values); + + // For backward compatibility with the Map interface + super.put(key, Util.join(",", values)); + } + + /** + * Sets the value for the XML element for the {@code _raw} field. This value + * is only used by the {@link ResultsReaderXml} class. + * @param value The text of the XML element. + */ + void putSegmentedRaw(String value) { + segmentedRaw = value; + } + + /** + * Returns the single value or delimited set of values for the specified + * field name, or {@code null} if the specified field is not present. + * + * When getting a multi-valued field, use the {@link #getArray(String)} or + * {@link #getArray(String, String)} method instead. + * + * @param key The field name. + * @return The single value or delimited set of values. + */ + public String get(String key) { + return super.get(key); + } + + /** + * Gets the values for the specified field name. + * <br><br> + * <b>Caution:</b> This variant of {@link #getArray(String, String)} is + * unsafe for {@link ResultsReader} implementations that require a + * delimiter. Therefore, this method should only be used for results that + * are returned by {@link ResultsReaderXml}. For other readers, use the + * {@link #getArray(String, String)} method instead. + * <br><br> + * If the underlying {@link ResultsReader} object has no delimiter, the + * original array of values is returned. If the object <i>does</i> have a + * delimiter, the single/delimited value is assumed to be a single value and + * is returned as a single-valued array. + * + * @param key The field name. + * @return The original array of values if there is no delimiter, or the + * single-valued array. 
+ */ + public String[] getArray(String key) { + String[] arrayValue = arrayValues.get(key); + if (arrayValue != null) { + return arrayValue; + } + + String singleValue = super.get(key); + if (singleValue == null) { + return null; + } + return new String[] { singleValue }; + } + + /** + * Gets the values for the specified field name. + * + * The delimiter must be determined empirically based on the search + * string and the data format of the index. The delimiter can differ + * between fields in the same {@link Event} object. + * + * The delimiter is ignored for {@link ResultsReader} implementations + * that do not require a delimiter, such as {@link ResultsReaderXml}. + * + * If the underlying {@link ResultsReader} object has no delimiter, the + * original array of values is returned (and the specified delimiter is + * ignored). If the object <i>does</i> have a delimiter, the + * single/delimited value is split based on the specified delimiter and is + * returned as an array. + * + * @param key The field name. + * @param delimiter The delimiter. + * @return The original array of values if there is no delimiter, or the + * array of values split by delimiter. + */ + public String[] getArray(String key, String delimiter) { + String[] arrayValue = arrayValues.get(key); + if (arrayValue != null) { + return arrayValue; + } + + String delimitedValues = super.get(key); + if (delimitedValues == null) { + return null; + } + return delimitedValues.split(Pattern.quote(delimiter)); + } + + /** + * Gets the XML markup for the {@code "_raw"} field value. This value + * is only used by the {@link ResultsReaderXml} class. + * <p> + * The return value is different than that of {@code get("_raw")} + * in that this segmented raw value is an XML fragment that includes all + * markup such as XML tags and escaped characters. 
+ * <p> + * For example, {@code get("_raw")} returns this: + * <p> + * {@code "http://localhost:8000/en-US/app/search/flashtimeline?q=search%20search%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"} + * <p> + * The {@code getSegmentedRaw} method returns this: + * <p> + * {@code <v xml:space="preserve" trunc="0">"http://localhost:8000/en-US/app/<sg h=\"1\">search</sg>/flashtimeline?q=<sg h=\"1\">search</sg>%20<sg h=\"1\">search</sg>%20index%3D_internal%20%7C%20head%2010&earliest=rt-1h&latest=rt"</v>} + * @return the segmented raw xml including tags and escaped characters. + */ + public String getSegmentedRaw() { + if (segmentedRaw == null) { + // ResultsReaderXml will always set this to not null. Using this + // method for other result reader is not supported. + throw new UnsupportedOperationException( + "The value is not available. Use ResultsReaderXml instead."); + } + return segmentedRaw; + } + // === Read Only === + + @Override + public void clear() { + throw new UnsupportedOperationException(); + } + + @Override + public Object clone() { + throw new UnsupportedOperationException(); + } + + @Override + public String put(String key, String value) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map<? extends String, ? extends String> m) { + throw new UnsupportedOperationException(); + } + + @Override + public String remove(Object key) { + throw new UnsupportedOperationException(); + } +} diff --git a/splunk/src/main/java/com/splunk/ExportResultsStream.java b/splunk/src/main/java/com/splunk/ExportResultsStream.java index c56067dd..175a3781 100644 --- a/splunk/src/main/java/com/splunk/ExportResultsStream.java +++ b/splunk/src/main/java/com/splunk/ExportResultsStream.java @@ -1,31 +1,31 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.FilterInputStream; -import java.io.InputStream; - -/** - * The {@code ExportResultsStream} class represents a stream constructed by the - * {@link Service#export} method. - * - */ -class ExportResultsStream extends FilterInputStream { - public ExportResultsStream(InputStream stream) { - super(stream); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.FilterInputStream; +import java.io.InputStream; + +/** + * The {@code ExportResultsStream} class represents a stream constructed by the + * {@link Service#export} method. 
+ * + */ +class ExportResultsStream extends FilterInputStream { + public ExportResultsStream(InputStream stream) { + super(stream); + } +} diff --git a/splunk/src/main/java/com/splunk/FieldType.java b/splunk/src/main/java/com/splunk/FieldType.java index 42d0264b..e6bc6b2c 100644 --- a/splunk/src/main/java/com/splunk/FieldType.java +++ b/splunk/src/main/java/com/splunk/FieldType.java @@ -1,71 +1,71 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; - -/** - * Represents the type of a field in a data model object. 
- */ -public enum FieldType { - STRING { - public String toString() { return "string"; } - }, - NUMBER { - public String toString() { return "number"; } - }, - BOOLEAN { - public String toString() { return "boolean"; } - }, - IPV4 { - public String toString() { return "ipv4"; } - }, - TIMESTAMP { - public String toString() { return "timestamp"; } - }, - CHILDCOUNT { - public String toString() { return "childcount"; } - }, - OBJECTCOUNT { - public String toString() { return "objectcount"; } - }, - UNDEFINED { - public String toString() { - throw new UnsupportedOperationException("No serialization for undefined field type."); - } - }; - - private final static Map<String, FieldType> typeLookup = new HashMap<>() {{ - put("string", STRING); - put("number", NUMBER); - put("boolean", BOOLEAN); - put("ipv4", IPV4); - put("timestamp", TIMESTAMP); - put("childcount", CHILDCOUNT); - put("objectcount", OBJECTCOUNT); - }}; - - public static FieldType parseType(String text) { - FieldType result = typeLookup.get(text.toLowerCase()); - if (result == null) { - result = UNDEFINED; - } - return result; - } - -}; +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; + +/** + * Represents the type of a field in a data model object. 
+ */ +public enum FieldType { + STRING { + public String toString() { return "string"; } + }, + NUMBER { + public String toString() { return "number"; } + }, + BOOLEAN { + public String toString() { return "boolean"; } + }, + IPV4 { + public String toString() { return "ipv4"; } + }, + TIMESTAMP { + public String toString() { return "timestamp"; } + }, + CHILDCOUNT { + public String toString() { return "childcount"; } + }, + OBJECTCOUNT { + public String toString() { return "objectcount"; } + }, + UNDEFINED { + public String toString() { + throw new UnsupportedOperationException("No serialization for undefined field type."); + } + }; + + private final static Map<String, FieldType> typeLookup = new HashMap<>() {{ + put("string", STRING); + put("number", NUMBER); + put("boolean", BOOLEAN); + put("ipv4", IPV4); + put("timestamp", TIMESTAMP); + put("childcount", CHILDCOUNT); + put("objectcount", OBJECTCOUNT); + }}; + + public static FieldType parseType(String text) { + FieldType result = typeLookup.get(text.toLowerCase()); + if (result == null) { + result = UNDEFINED; + } + return result; + } + +}; diff --git a/splunk/src/main/java/com/splunk/FiredAlertGroup.java b/splunk/src/main/java/com/splunk/FiredAlertGroup.java index 2864b250..ec643cfa 100644 --- a/splunk/src/main/java/com/splunk/FiredAlertGroup.java +++ b/splunk/src/main/java/com/splunk/FiredAlertGroup.java @@ -1,43 +1,43 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; -/** - * The {@code FiredAlertGroup} class represents a group of fired alerts, which - * are the alerts for a given saved search. - */ -public class FiredAlertGroup extends Entity { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The fired alert endpoint. - */ - FiredAlertGroup(Service service, String path) { - super(service, path); - } - - /** - * Returns a group of fired alerts for a given saved search. - * - * @return The fired alerts in the group. - */ - public EntityCollection<FiredAlert> getAlerts() { - return new EntityCollection<>( - service, this.path, FiredAlert.class); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; +/** + * The {@code FiredAlertGroup} class represents a group of fired alerts, which + * are the alerts for a given saved search. + */ +public class FiredAlertGroup extends Entity { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The fired alert endpoint. + */ + FiredAlertGroup(Service service, String path) { + super(service, path); + } + + /** + * Returns a group of fired alerts for a given saved search. + * + * @return The fired alerts in the group. 
+ */ + public EntityCollection<FiredAlert> getAlerts() { + return new EntityCollection<>( + service, this.path, FiredAlert.class); + } +} diff --git a/splunk/src/main/java/com/splunk/HttpService.java b/splunk/src/main/java/com/splunk/HttpService.java index 1a36a3f5..8f87e243 100644 --- a/splunk/src/main/java/com/splunk/HttpService.java +++ b/splunk/src/main/java/com/splunk/HttpService.java @@ -1,619 +1,619 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import javax.net.ssl.*; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.net.*; -import java.security.cert.X509Certificate; -import java.util.*; -import java.util.Map.Entry; - -/** - * The {@code HttpService} class represents a generic HTTP service at a given - * address ({@code host:port}), accessed using a given protocol scheme - * ({@code http} or {@code https}). - */ -public class HttpService { - // For debugging purposes - private static final boolean VERBOSE_REQUESTS = false; - protected static SSLSecurityProtocol sslSecurityProtocol = null; - - /** - * Boolean flag for validating certificates at either of the sides (client/server). - * If true, then it will check and validate relevant certificates otherwise, in case of false, it will accept all certificates. 
- * For PROD environment, TRUE is strongly recommended, whereas working in localhost OR development environment, FALSE is used. - * Default Value: TRUE - */ - protected static boolean validateCertificates = true; - - private static SSLSocketFactory sslSocketFactory = createSSLFactory(); - private static String HTTPS_SCHEME = "https"; - private static String HTTP_SCHEME = "http"; - private static List<String> VALID_HOSTS = new ArrayList<>(Arrays.asList("localhost", "127.0.0.1", "::1")); - - private static final HostnameVerifier HOSTNAME_VERIFIER = new HostnameVerifier() { - public boolean verify(String s, SSLSession sslSession) { - if(VALID_HOSTS.contains(s)){ - return true; - } else { - HostnameVerifier hv = HttpsURLConnection.getDefaultHostnameVerifier(); - return hv.verify(s, sslSession); - } - } - }; - - /** - * A variable to hold an optional custom HTTPS handler - */ - protected URLStreamHandler httpsHandler = null; - - /** - * The scheme used to access the service. - */ - protected String scheme = "https"; - - /** - * The host name of the service. - */ - protected String host = "localhost"; - - /** - * The port number of the service. - */ - protected int port = 8089; - - protected Integer connectTimeout = null; - protected Integer readTimeout = null; - - private String prefix = null; - - static Map<String, String> defaultHeader = new HashMap<>() {{ - put("User-Agent", "splunk-sdk-java/1.9.5"); - put("Accept", "*/*"); - }}; - - protected Map<String, String> customHeaders = new HashMap<>(); - - protected SimpleCookieStore cookieStore = new SimpleCookieStore(); - - /** - * Constructs a new {@code HttpService} instance. - */ - public HttpService() { - } - - /** - * Constructs a new {@code HttpService} instance at the given host. - * - * @param host The host name of the service. - */ - public HttpService(String host) { - this.host = host; - } - - /** - * Constructs a new {@code HttpService} instance at the given host and port. 
- * - * @param host The host name of the service. - * @param port The port number of the service. - */ - public HttpService(String host, int port) { - this.host = host; - this.port = port; - } - - /** - * Constructs a new {@code HttpService} instance using the given host, - * port, and scheme. - * - * @param host The host name of the service. - * @param port The port number of the service. - * @param scheme Scheme for accessing the service ({@code http} or - * {@code https}). - */ - public HttpService(String host, int port, String scheme) { - this.host = host; - this.port = port; - this.scheme = scheme; - } - - /** - * Constructs a new {@code HttpService} instance using the given host, - * port, and scheme, and instructing it to use the specified HTTPS handler. - * - * @param host The host name of the service. - * @param port The port number of the service. - * @param scheme Scheme for accessing the service ({@code http} or - * {@code https}). - * @param httpsHandler A custom URL Stream handler. - */ - public HttpService(String host, int port, String scheme, - URLStreamHandler httpsHandler) { - this.host = host; - this.port = port; - this.scheme = scheme; - this.httpsHandler = httpsHandler; - } - - // Returns the count of arguments in the given {@code args} map. - private static int count(Map<String, Object> args) { - if (args == null) return 0; - return args.size(); - } - - /** - * Issues an HTTP GET request against the service using a given path. - * - * @param path The request path. - * @return The HTTP response. - */ - public ResponseMessage get(String path) { - return send(path, new RequestMessage("GET")); - } - - /** - * Issues an HTTP GET request against the service using a given path and - * query arguments. - * - * @param path The request path. - * @param args The query arguments. - * @return The HTTP response. - */ - public ResponseMessage get(String path, Map<String, Object> args) { - if (count(args) > 0) - path = path + "?" 
+ Args.encode(args); - RequestMessage request = new RequestMessage("GET"); - return send(path, request); - } - - /** - * Returns the host name of this service. - * - * @return The host name. - */ - public String getHost() { - return this.host; - } - - /** - * Returns the port number of this service. - * - * @return The port number. - */ - public int getPort() { - return this.port; - } - - /** - * Sets Custom Headers of this service - * - * @param headers The custom headers. - */ - public void setCustomHeaders(Map<String, String> headers) { - if (Objects.nonNull(headers)) { - customHeaders = headers; - } - } - - /** - * Returns the SSL security protocol of this service. - * - * @return The SSL security protocol. - */ - public static SSLSecurityProtocol getSslSecurityProtocol() { - return sslSecurityProtocol; - } - - /** - * Sets the SSL security protocol of this service. - * @param securityProtocol The SSLSecurityProtocal instance - */ - public static void setSslSecurityProtocol(SSLSecurityProtocol securityProtocol) { - // Only update the SSL_SOCKET_FACTORY if changing protocols - if (sslSecurityProtocol != securityProtocol) { - sslSecurityProtocol = securityProtocol; - sslSocketFactory = createSSLFactory(); - } - } - - /** - * Adds list of Cluster Master Hosts to the list of Valid Hosts for Hostname verification. - * @param searchHeadService Splunk SearchHead Service instance - */ - public static void addClusterMasterURIsToHosts(Service searchHeadService){ - VALID_HOSTS.addAll(searchHeadService.getClusterMasters()); - } - - /** - * Returns the URL prefix of this service, consisting of - * {@code scheme://host[:port]}. - * - * @return The URL prefix. - */ - public String getPrefix() { - if (this.prefix == null) - this.prefix = String.format("%s://%s:%s", - this.scheme, this.host, this.port); - return this.prefix; - } - - /** - * Returns the scheme used by this service. - * - * @return The scheme. 
- */ - public String getScheme() { - return this.scheme; - } - - /** - * Constructs a fully-qualified URL for this service using a given path. - * - * @param path The path to qualify. - * @return The fully-qualified URL for the service. - */ - public URL getUrl(String path) { - try { - if (HTTPS_SCHEME.equals(getScheme()) && httpsHandler != null) { - // This branch is not currently covered by unit tests as I - // could not figure out a generic way to get the default - // HTTPS handler. - return new URL(getScheme(), getHost(), getPort(), path, - httpsHandler); - } else { - return new URL(getScheme(), getHost(), getPort(), path); - } - } catch (MalformedURLException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - - /** - * Returns all the stored custom headers - * - * @return customHeaders The custom headers - */ - public Map<String, String> getCustomHeaders() { - return customHeaders; - } - - /** - * Returns all the stored cookies - * - * @return All cookies as in a string in the format key=value; key=value; etc=etc - */ - public String stringifyCookies() { - return cookieStore.getCookies(); - } - - /** - * Adds the passed cookie header to the cookieStore - * - * @param setCookieHeader The result from a getRequestHeader("Set-Cookie") call - */ - public void addCookie(String setCookieHeader) { - cookieStore.add(setCookieHeader); - } - - /** - * Removes all cookies from the cookieStore - */ - public void removeAllCookies() { - cookieStore.removeAll(); - } - - /** - * Returns true if the cookieStore has any Splunk Authorization cookies, false otherwise - * - * @return True if there are cookies, false otherwise - */ - public Boolean hasSplunkAuthCookies() { - return cookieStore.hasSplunkAuthCookie(); - } - - /** - * Returns the connect timeout used by this service. - * - * @return The timeout in milliseconds. 
- */ - public Integer getConnectTimeout() { - return connectTimeout; - } - - /** - * Sets a specified timeout value, in milliseconds, to be used when opening a communications link. - * - * @param connectTimeout timeout in milliseconds, a timeout of zero is interpreted as an infinite timeout. - */ - public void setConnectTimeout(Integer connectTimeout) { - this.connectTimeout = connectTimeout; - } - - /** - * Returns the read timeout used by this service. - * - * @return The timeout in milliseconds. - */ - public Integer getReadTimeout() { - return readTimeout; - } - - /** - * Sets a specified timeout value, in milliseconds, to be used when reading from a communications link. - * - * @param readTimeout timeout in milliseconds, a timeout of zero is interpreted as an infinite timeout. - */ - public void setReadTimeout(Integer readTimeout) { - this.readTimeout = readTimeout; - } - - /** - * Issues a POST request against the service using a given path. - * - * @param path The request path. - * @return The HTTP response. - */ - public ResponseMessage post(String path) { - return post(path, null); - } - - /** - * Issues a POST request against the service using a given path and - * form arguments. - * - * @param path The request path. - * @param args The form arguments. - * @return The HTTP response. - */ - public ResponseMessage post(String path, Map<String, Object> args) { - RequestMessage request = new RequestMessage("POST"); - request.getHeader().put( - "Content-Type", "application/x-www-form-urlencoded"); - if (count(args) > 0) - request.setContent(Args.encode(args)); - return send(path, request); - } - - /** - * Issues a DELETE request against the service using a given path. - * - * @param path The request path. - * @return The HTTP response. 
- */ - public ResponseMessage delete(String path) { - RequestMessage request = new RequestMessage("DELETE"); - return send(path, request); - } - - /** - * Issues a DELETE request against the service using a given path - * and query arguments. - * - * @param path The request path. - * @param args The query arguments. - * @return The HTTP response. - */ - public ResponseMessage delete(String path, Map<String, Object> args) { - if (count(args) > 0) - path = path + "?" + Args.encode(args); - RequestMessage request = new RequestMessage("DELETE"); - return send(path, request); - } - - /** - * Opens a socket to this service. - * - * @return The socket. - * @throws IOException - */ - Socket open() throws IOException { - if (this.scheme.equals("https")) { - return sslSocketFactory.createSocket(this.host, this.port); - } - return new Socket(this.host, this.port); - } - - /** - * Issue an HTTP request against the service using a given path and - * request message. - * - * @param path The request path. - * @param request The request message. - * @return The HTTP response. - */ - public ResponseMessage send(String path, RequestMessage request) { - // Construct a full URL to the resource - URL url = getUrl(path); - // Create and initialize the connection object - HttpURLConnection cn; - try { - cn = (HttpURLConnection) url.openConnection(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - if (cn instanceof HttpsURLConnection cnInst) { - cnInst.setSSLSocketFactory(sslSocketFactory); - cnInst.setHostnameVerifier(HOSTNAME_VERIFIER); - } - cn.setUseCaches(false); - cn.setAllowUserInteraction(false); - cn.setConnectTimeout(connectTimeout == null ? 0 : connectTimeout); - cn.setReadTimeout(readTimeout == null ? 
0 : readTimeout); - - // Set the request method - String method = request.getMethod(); - try { - cn.setRequestMethod(method); - } catch (ProtocolException e) { - throw new RuntimeException(e.getMessage(), e); - } - - // Add headers from request message - Map<String, String> header = request.getHeader(); - for (Entry<String, String> entry : header.entrySet()) - cn.setRequestProperty(entry.getKey(), entry.getValue()); - // Add default headers that were absent from the request message - for (Entry<String, String> entry : defaultHeader.entrySet()) { - String key = entry.getKey(); - if (header.containsKey(key)) continue; - cn.setRequestProperty(key, entry.getValue()); - } - // Add Custom Headers - for (Entry<String, String> entry: customHeaders.entrySet()) { - String key = entry.getKey(); - if (!header.containsKey(key)) { - cn.setRequestProperty(key, entry.getValue()); - } - } - - // Add cookies to header - cn.setRequestProperty("Cookie", cookieStore.getCookies()); - - // Write out request content, if any - try { - Object content = request.getContent(); - if (content != null) { - cn.setDoOutput(true); - OutputStream stream = cn.getOutputStream(); - OutputStreamWriter writer = new OutputStreamWriter(stream, "UTF-8"); - writer.write((String) content); - writer.close(); - } - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - - if (VERBOSE_REQUESTS) { - System.out.format("%s %s => ", method, url.toString()); - } - - // Execute the request - try { - cn.connect(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - - int status; - try { - status = cn.getResponseCode(); - } catch (IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - - InputStream input = null; - try { - input = status >= 400 - ? 
cn.getErrorStream() - : cn.getInputStream(); - } catch (IOException e) { - assert (false); - } - - // If user session has expired check for 'autologin' flag to either re-login or throw HTTPException - if(this instanceof Service && status == 401){ - return new ResponseMessage(401, input); - } - - // Add cookies to cookie Store - Map<String, List<String>> headers = cn.getHeaderFields(); - if (headers.containsKey("Set-Cookie")) { - for (String cookieHeader : headers.get("Set-Cookie")) { - if (cookieHeader != null && cookieHeader.length() > 0) - cookieStore.add(cookieHeader); - } - } - - ResponseMessage response = new ResponseMessage(status, input); - - if (VERBOSE_REQUESTS) { - System.out.format("%d\n", status); - if (method.equals("POST")) { - System.out.println(" " + request.getContent()); - } - } - - if (status >= 400) - throw HttpException.create(response); - - return response; - } - - public static void setSSLSocketFactory(SSLSocketFactory sslSocketFactory) { - if (sslSocketFactory == null) - throw new IllegalArgumentException("The sslSocketFactory cannot be null."); - HttpService.sslSocketFactory = sslSocketFactory; - } - - public static SSLSocketFactory getSSLSocketFactory() { - return HttpService.sslSocketFactory; - } - - public static void setValidateCertificates(boolean validateCertificate) { - // update the SSL_SOCKET_FACTORY if validateCertificates flag is changed - if (validateCertificates != validateCertificate) { - validateCertificates = validateCertificate; - sslSocketFactory = createSSLFactory(); - } - } - - public static SSLSocketFactory createSSLFactory() { - - try { - SSLContext context; - if (sslSecurityProtocol != null) { - String contextStr = sslSecurityProtocol.toString().contains("SSL") ? 
"SSL" : "TLS"; - context = SSLContext.getInstance(contextStr); - } else if (System.getProperty("java.version").compareTo("1.8") >= 0) { - context = SSLContext.getInstance("TLS"); - } else { - context = SSLContext.getDefault(); - } - - if (validateCertificates) { - context.init(null, null, null); - // For now this check is set as null. - // TODO: Implementation logic for validating client certificate. - } else { - TrustManager[] trustAll = new TrustManager[]{ - new X509TrustManager() { - public X509Certificate[] getAcceptedIssuers() { - return null; - } - - public void checkClientTrusted(X509Certificate[] certs, String authType) { - } - - public void checkServerTrusted(X509Certificate[] certs, String authType) { - } - } - }; - context.init(null, trustAll, null); - } - - return context.getSocketFactory(); - } catch (Exception e) { - throw new RuntimeException("Error setting up SSL socket factory: " + e, e); - } - } - -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import javax.net.ssl.*; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.net.*; +import java.security.cert.X509Certificate; +import java.util.*; +import java.util.Map.Entry; + +/** + * The {@code HttpService} class represents a generic HTTP service at a given + * address ({@code host:port}), accessed using a given protocol scheme + * ({@code http} or {@code https}). + */ +public class HttpService { + // For debugging purposes + private static final boolean VERBOSE_REQUESTS = false; + protected static SSLSecurityProtocol sslSecurityProtocol = null; + + /** + * Boolean flag for validating certificates at either of the sides (client/server). + * If true, then it will check and validate relevant certificates otherwise, in case of false, it will accept all certificates. + * For PROD environment, TRUE is strongly recommended, whereas working in localhost OR development environment, FALSE is used. + * Default Value: TRUE + */ + protected static boolean validateCertificates = true; + + private static SSLSocketFactory sslSocketFactory = createSSLFactory(); + private static String HTTPS_SCHEME = "https"; + private static String HTTP_SCHEME = "http"; + private static List<String> VALID_HOSTS = new ArrayList<>(Arrays.asList("localhost", "127.0.0.1", "::1")); + + private static final HostnameVerifier HOSTNAME_VERIFIER = new HostnameVerifier() { + public boolean verify(String s, SSLSession sslSession) { + if(VALID_HOSTS.contains(s)){ + return true; + } else { + HostnameVerifier hv = HttpsURLConnection.getDefaultHostnameVerifier(); + return hv.verify(s, sslSession); + } + } + }; + + /** + * A variable to hold an optional custom HTTPS handler + */ + protected URLStreamHandler httpsHandler = null; + + /** + * The scheme used to access the service. + */ + protected String scheme = "https"; + + /** + * The host name of the service. 
+ */ + protected String host = "localhost"; + + /** + * The port number of the service. + */ + protected int port = 8089; + + protected Integer connectTimeout = null; + protected Integer readTimeout = null; + + private String prefix = null; + + static Map<String, String> defaultHeader = new HashMap<>() {{ + put("User-Agent", "splunk-sdk-java/1.9.5"); + put("Accept", "*/*"); + }}; + + protected Map<String, String> customHeaders = new HashMap<>(); + + protected SimpleCookieStore cookieStore = new SimpleCookieStore(); + + /** + * Constructs a new {@code HttpService} instance. + */ + public HttpService() { + } + + /** + * Constructs a new {@code HttpService} instance at the given host. + * + * @param host The host name of the service. + */ + public HttpService(String host) { + this.host = host; + } + + /** + * Constructs a new {@code HttpService} instance at the given host and port. + * + * @param host The host name of the service. + * @param port The port number of the service. + */ + public HttpService(String host, int port) { + this.host = host; + this.port = port; + } + + /** + * Constructs a new {@code HttpService} instance using the given host, + * port, and scheme. + * + * @param host The host name of the service. + * @param port The port number of the service. + * @param scheme Scheme for accessing the service ({@code http} or + * {@code https}). + */ + public HttpService(String host, int port, String scheme) { + this.host = host; + this.port = port; + this.scheme = scheme; + } + + /** + * Constructs a new {@code HttpService} instance using the given host, + * port, and scheme, and instructing it to use the specified HTTPS handler. + * + * @param host The host name of the service. + * @param port The port number of the service. + * @param scheme Scheme for accessing the service ({@code http} or + * {@code https}). + * @param httpsHandler A custom URL Stream handler. 
+ */ + public HttpService(String host, int port, String scheme, + URLStreamHandler httpsHandler) { + this.host = host; + this.port = port; + this.scheme = scheme; + this.httpsHandler = httpsHandler; + } + + // Returns the count of arguments in the given {@code args} map. + private static int count(Map<String, Object> args) { + if (args == null) return 0; + return args.size(); + } + + /** + * Issues an HTTP GET request against the service using a given path. + * + * @param path The request path. + * @return The HTTP response. + */ + public ResponseMessage get(String path) { + return send(path, new RequestMessage("GET")); + } + + /** + * Issues an HTTP GET request against the service using a given path and + * query arguments. + * + * @param path The request path. + * @param args The query arguments. + * @return The HTTP response. + */ + public ResponseMessage get(String path, Map<String, Object> args) { + if (count(args) > 0) + path = path + "?" + Args.encode(args); + RequestMessage request = new RequestMessage("GET"); + return send(path, request); + } + + /** + * Returns the host name of this service. + * + * @return The host name. + */ + public String getHost() { + return this.host; + } + + /** + * Returns the port number of this service. + * + * @return The port number. + */ + public int getPort() { + return this.port; + } + + /** + * Sets Custom Headers of this service + * + * @param headers The custom headers. + */ + public void setCustomHeaders(Map<String, String> headers) { + if (Objects.nonNull(headers)) { + customHeaders = headers; + } + } + + /** + * Returns the SSL security protocol of this service. + * + * @return The SSL security protocol. + */ + public static SSLSecurityProtocol getSslSecurityProtocol() { + return sslSecurityProtocol; + } + + /** + * Sets the SSL security protocol of this service. 
+ * @param securityProtocol The SSLSecurityProtocal instance + */ + public static void setSslSecurityProtocol(SSLSecurityProtocol securityProtocol) { + // Only update the SSL_SOCKET_FACTORY if changing protocols + if (sslSecurityProtocol != securityProtocol) { + sslSecurityProtocol = securityProtocol; + sslSocketFactory = createSSLFactory(); + } + } + + /** + * Adds list of Cluster Master Hosts to the list of Valid Hosts for Hostname verification. + * @param searchHeadService Splunk SearchHead Service instance + */ + public static void addClusterMasterURIsToHosts(Service searchHeadService){ + VALID_HOSTS.addAll(searchHeadService.getClusterMasters()); + } + + /** + * Returns the URL prefix of this service, consisting of + * {@code scheme://host[:port]}. + * + * @return The URL prefix. + */ + public String getPrefix() { + if (this.prefix == null) + this.prefix = String.format("%s://%s:%s", + this.scheme, this.host, this.port); + return this.prefix; + } + + /** + * Returns the scheme used by this service. + * + * @return The scheme. + */ + public String getScheme() { + return this.scheme; + } + + /** + * Constructs a fully-qualified URL for this service using a given path. + * + * @param path The path to qualify. + * @return The fully-qualified URL for the service. + */ + public URL getUrl(String path) { + try { + if (HTTPS_SCHEME.equals(getScheme()) && httpsHandler != null) { + // This branch is not currently covered by unit tests as I + // could not figure out a generic way to get the default + // HTTPS handler. 
+ return new URL(getScheme(), getHost(), getPort(), path, + httpsHandler); + } else { + return new URL(getScheme(), getHost(), getPort(), path); + } + } catch (MalformedURLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Returns all the stored custom headers + * + * @return customHeaders The custom headers + */ + public Map<String, String> getCustomHeaders() { + return customHeaders; + } + + /** + * Returns all the stored cookies + * + * @return All cookies as in a string in the format key=value; key=value; etc=etc + */ + public String stringifyCookies() { + return cookieStore.getCookies(); + } + + /** + * Adds the passed cookie header to the cookieStore + * + * @param setCookieHeader The result from a getRequestHeader("Set-Cookie") call + */ + public void addCookie(String setCookieHeader) { + cookieStore.add(setCookieHeader); + } + + /** + * Removes all cookies from the cookieStore + */ + public void removeAllCookies() { + cookieStore.removeAll(); + } + + /** + * Returns true if the cookieStore has any Splunk Authorization cookies, false otherwise + * + * @return True if there are cookies, false otherwise + */ + public Boolean hasSplunkAuthCookies() { + return cookieStore.hasSplunkAuthCookie(); + } + + /** + * Returns the connect timeout used by this service. + * + * @return The timeout in milliseconds. + */ + public Integer getConnectTimeout() { + return connectTimeout; + } + + /** + * Sets a specified timeout value, in milliseconds, to be used when opening a communications link. + * + * @param connectTimeout timeout in milliseconds, a timeout of zero is interpreted as an infinite timeout. + */ + public void setConnectTimeout(Integer connectTimeout) { + this.connectTimeout = connectTimeout; + } + + /** + * Returns the read timeout used by this service. + * + * @return The timeout in milliseconds. 
+ */ + public Integer getReadTimeout() { + return readTimeout; + } + + /** + * Sets a specified timeout value, in milliseconds, to be used when reading from a communications link. + * + * @param readTimeout timeout in milliseconds, a timeout of zero is interpreted as an infinite timeout. + */ + public void setReadTimeout(Integer readTimeout) { + this.readTimeout = readTimeout; + } + + /** + * Issues a POST request against the service using a given path. + * + * @param path The request path. + * @return The HTTP response. + */ + public ResponseMessage post(String path) { + return post(path, null); + } + + /** + * Issues a POST request against the service using a given path and + * form arguments. + * + * @param path The request path. + * @param args The form arguments. + * @return The HTTP response. + */ + public ResponseMessage post(String path, Map<String, Object> args) { + RequestMessage request = new RequestMessage("POST"); + request.getHeader().put( + "Content-Type", "application/x-www-form-urlencoded"); + if (count(args) > 0) + request.setContent(Args.encode(args)); + return send(path, request); + } + + /** + * Issues a DELETE request against the service using a given path. + * + * @param path The request path. + * @return The HTTP response. + */ + public ResponseMessage delete(String path) { + RequestMessage request = new RequestMessage("DELETE"); + return send(path, request); + } + + /** + * Issues a DELETE request against the service using a given path + * and query arguments. + * + * @param path The request path. + * @param args The query arguments. + * @return The HTTP response. + */ + public ResponseMessage delete(String path, Map<String, Object> args) { + if (count(args) > 0) + path = path + "?" + Args.encode(args); + RequestMessage request = new RequestMessage("DELETE"); + return send(path, request); + } + + /** + * Opens a socket to this service. + * + * @return The socket. 
+ * @throws IOException + */ + Socket open() throws IOException { + if (this.scheme.equals("https")) { + return sslSocketFactory.createSocket(this.host, this.port); + } + return new Socket(this.host, this.port); + } + + /** + * Issue an HTTP request against the service using a given path and + * request message. + * + * @param path The request path. + * @param request The request message. + * @return The HTTP response. + */ + public ResponseMessage send(String path, RequestMessage request) { + // Construct a full URL to the resource + URL url = getUrl(path); + // Create and initialize the connection object + HttpURLConnection cn; + try { + cn = (HttpURLConnection) url.openConnection(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + if (cn instanceof HttpsURLConnection cnInst) { + cnInst.setSSLSocketFactory(sslSocketFactory); + cnInst.setHostnameVerifier(HOSTNAME_VERIFIER); + } + cn.setUseCaches(false); + cn.setAllowUserInteraction(false); + cn.setConnectTimeout(connectTimeout == null ? 0 : connectTimeout); + cn.setReadTimeout(readTimeout == null ? 
0 : readTimeout); + + // Set the request method + String method = request.getMethod(); + try { + cn.setRequestMethod(method); + } catch (ProtocolException e) { + throw new RuntimeException(e.getMessage(), e); + } + + // Add headers from request message + Map<String, String> header = request.getHeader(); + for (Entry<String, String> entry : header.entrySet()) + cn.setRequestProperty(entry.getKey(), entry.getValue()); + // Add default headers that were absent from the request message + for (Entry<String, String> entry : defaultHeader.entrySet()) { + String key = entry.getKey(); + if (header.containsKey(key)) continue; + cn.setRequestProperty(key, entry.getValue()); + } + // Add Custom Headers + for (Entry<String, String> entry: customHeaders.entrySet()) { + String key = entry.getKey(); + if (!header.containsKey(key)) { + cn.setRequestProperty(key, entry.getValue()); + } + } + + // Add cookies to header + cn.setRequestProperty("Cookie", cookieStore.getCookies()); + + // Write out request content, if any + try { + Object content = request.getContent(); + if (content != null) { + cn.setDoOutput(true); + OutputStream stream = cn.getOutputStream(); + OutputStreamWriter writer = new OutputStreamWriter(stream, "UTF-8"); + writer.write((String) content); + writer.close(); + } + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + + if (VERBOSE_REQUESTS) { + System.out.format("%s %s => ", method, url.toString()); + } + + // Execute the request + try { + cn.connect(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + + int status; + try { + status = cn.getResponseCode(); + } catch (IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + + InputStream input = null; + try { + input = status >= 400 + ? 
cn.getErrorStream() + : cn.getInputStream(); + } catch (IOException e) { + assert (false); + } + + // If user session has expired check for 'autologin' flag to either re-login or throw HTTPException + if(this instanceof Service && status == 401){ + return new ResponseMessage(401, input); + } + + // Add cookies to cookie Store + Map<String, List<String>> headers = cn.getHeaderFields(); + if (headers.containsKey("Set-Cookie")) { + for (String cookieHeader : headers.get("Set-Cookie")) { + if (cookieHeader != null && cookieHeader.length() > 0) + cookieStore.add(cookieHeader); + } + } + + ResponseMessage response = new ResponseMessage(status, input); + + if (VERBOSE_REQUESTS) { + System.out.format("%d\n", status); + if (method.equals("POST")) { + System.out.println(" " + request.getContent()); + } + } + + if (status >= 400) + throw HttpException.create(response); + + return response; + } + + public static void setSSLSocketFactory(SSLSocketFactory sslSocketFactory) { + if (sslSocketFactory == null) + throw new IllegalArgumentException("The sslSocketFactory cannot be null."); + HttpService.sslSocketFactory = sslSocketFactory; + } + + public static SSLSocketFactory getSSLSocketFactory() { + return HttpService.sslSocketFactory; + } + + public static void setValidateCertificates(boolean validateCertificate) { + // update the SSL_SOCKET_FACTORY if validateCertificates flag is changed + if (validateCertificates != validateCertificate) { + validateCertificates = validateCertificate; + sslSocketFactory = createSSLFactory(); + } + } + + public static SSLSocketFactory createSSLFactory() { + + try { + SSLContext context; + if (sslSecurityProtocol != null) { + String contextStr = sslSecurityProtocol.toString().contains("SSL") ? 
"SSL" : "TLS"; + context = SSLContext.getInstance(contextStr); + } else if (System.getProperty("java.version").compareTo("1.8") >= 0) { + context = SSLContext.getInstance("TLS"); + } else { + context = SSLContext.getDefault(); + } + + if (validateCertificates) { + context.init(null, null, null); + // For now this check is set as null. + // TODO: Implementation logic for validating client certificate. + } else { + TrustManager[] trustAll = new TrustManager[]{ + new X509TrustManager() { + public X509Certificate[] getAcceptedIssuers() { + return null; + } + + public void checkClientTrusted(X509Certificate[] certs, String authType) { + } + + public void checkServerTrusted(X509Certificate[] certs, String authType) { + } + } + }; + context.init(null, trustAll, null); + } + + return context.getSocketFactory(); + } catch (Exception e) { + throw new RuntimeException("Error setting up SSL socket factory: " + e, e); + } + } + +} + diff --git a/splunk/src/main/java/com/splunk/Index.java b/splunk/src/main/java/com/splunk/Index.java index 4665098a..18a9adbd 100644 --- a/splunk/src/main/java/com/splunk/Index.java +++ b/splunk/src/main/java/com/splunk/Index.java @@ -1,1112 +1,1112 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.Socket; -import java.util.Date; - -/** - * The {@code Index} class represents an index. 
- */ -public class Index extends Entity { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The index endpoint. - */ - Index(Service service, String path) { - super(service, path); - } - - /** - * Creates a writable socket to this index. - * - * @return The writable socket. - * @throws IOException Throws exception if fails to write socket. - */ - public Socket attach() throws IOException { - Receiver receiver = service.getReceiver(); - return receiver.attach(getName()); - } - - /** - * Writes events to this index, reusing the connection. - * This method passes an output stream connected to the index to the - * {@code run} method of the {@code ReceiverBehavior} object, then handles - * setting up and tearing down the socket. - * <p> - * For an example of how to use this method, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to - * get data into Splunk</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * - * @param behavior The body of a {@code try} block as an anonymous - * implementation of the {@code ReceiverBehavior} interface. - * @throws IOException The IOException class - */ - public void attachWith(ReceiverBehavior behavior) throws IOException { - try (Socket socket = attach(); - OutputStream output = socket.getOutputStream();) { - behavior.run(output); - output.flush(); - } - } - - /** - * Creates a writable socket to this index. - * - * @param args Optional arguments for this stream. Valid parameters are: - * "host", "host_regex", "source", and "sourcetype". - * @return The socket. - * @throws IOException The IOException class - */ - public Socket attach(Args args) throws IOException { - Receiver receiver = service.getReceiver(); - return receiver.attach(getName(), args); - } - - /** - * Cleans this index, which removes all events from it. - * - * @param maxSeconds The maximum number of seconds to wait before returning. 
- * A value of -1 means to wait forever. - * @throws SplunkException If cleaning timed out or - * if the thread was interrupted. - * @return This index. - */ - public Index clean(int maxSeconds) { - Args saved = new Args(); - saved.put("maxTotalDataSizeMB", getMaxTotalDataSizeMB()); - saved.put("frozenTimePeriodInSecs", getFrozenTimePeriodInSecs()); - try { - Args reset = new Args(); - reset.put("maxTotalDataSizeMB", "1"); - reset.put("frozenTimePeriodInSecs", "1"); - update(reset); - rollHotBuckets(); - - long startTime = System.currentTimeMillis(); - long endTime = startTime + (maxSeconds * 1000); - while (true) { - long timeLeft = endTime - System.currentTimeMillis(); - if (timeLeft <= 0) { - break; - } - Thread.sleep(Math.min(1000, timeLeft)); - - if (this.getTotalEventCount() == 0) { - return this; - } - refresh(); - } - - throw new SplunkException(SplunkException.TIMEOUT, - "Index cleaning timed out"); - } - catch (InterruptedException e) - { - SplunkException f = new SplunkException( - SplunkException.INTERRUPTED, - "Index cleaning interrupted."); - f.initCause(e); - throw f; - } - finally { - update(saved); - } - } - - /** - * Indicates whether the data retrieved from this index has been - * UTF8-encoded. - * - * @return {@code true} if the retrieved data is in UTF8, {@code false} if - * not. - */ - public boolean getAssureUTF8() { - return getBoolean("assureUTF8"); - } - - /** - * Returns the total size of all bloom filter files. - * - * @return The total size of all bloom filter files, in KB. - */ - public int getBloomfilterTotalSizeKB() { - return getInteger("bloomfilterTotalSizeKB", 0); - } - - /** - * Returns the suggested size of the .tsidx file for the bucket rebuild - * process. - * Valid values are: "auto", a positive integer, or a positive - * integer followed by "KB", "MB", or "GB". - * - * @return The suggested size of the .tsidx file for the bucket rebuild - * process. 
- */ - public String getBucketRebuildMemoryHint() { - return getString("bucketRebuildMemoryHint"); - } - - /** - * Returns the absolute file path to the cold database for this index. - * This value may contain shell expansion terms. - * - * @return The absolute file path to the cold database, or {@code null} if - * not specified. - */ - public String getColdPath() { - return getString("coldPath", null); - } - - /** - * Returns the expanded absolute file path to the cold database for this - * index. - * - * @return The expanded absolute file path to the cold database, or - * {@code null} if not specified. - */ - public String getColdPathExpanded() { - return getString("coldPath_expanded", null); - } - - /** - * Returns the frozen archive destination path for this index. - * - * @return The frozen archive destination path, or {@code null} if not - * specified. - */ - public String getColdToFrozenDir() { - return getString("coldToFrozenDir", null); - } - - /** - * Returns the path to the archiving script. - * <p>For more info about archiving scripts, see the - * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" - * target="_blank">POST data/indexes endpoint</a> in the REST API - * documentation. - * @see #getColdToFrozenDir - * - * @return The archiving script, or {@code null} if not specified. - */ - public String getColdToFrozenScript() { - return getString("coldToFrozenScript", null); - } - - /** - * Indicates whether raw data is compressed. - * - * @deprecated Splunk always compresses raw data. - * @return {@code true} if raw data is compressed, {@code false} if not. - */ - public boolean getCompressRawdata() { - return getBoolean("compressRawdata"); - } - - /** - * Returns the current size of this index. - * - * @return The current size of the index, in MB. - */ - public int getCurrentDBSizeMB() { - return getInteger("currentDBSizeMB"); - } - - /** - * Return the default index name of the Splunk instance. 
- * - * @return The default index name. - */ - public String getDefaultDatabase() { - return getString("defaultDatabase"); - } - - /** - * Returns whether asynchronous "online fsck" bucket repair is enabled. - * <p> - * When this feature is enabled, you don't have to wait for buckets to be - * repaired before starting Splunk, but you might notice a slight - * degradation in performance as a result. - * @return {@code true} if bucket repair is enabled, {@code false} if - * not. - */ - public boolean getEnableOnlineBucketRepair() { - - return getBoolean("enableOnlineBucketRepair"); - } - - /** - * Indicates whether real-time search is enabled for this index. - * - * @return {@code true} if real-time search is enabled, {@code false} if - * not. - */ - public boolean getEnableRealtimeSearch() { - return getBoolean("enableRealtimeSearch"); - } - - /** - * Returns the maximum age for a bucket, after which the data in this index - * rolls to frozen. If archiving is necessary for frozen data, see the - * {@code coldToFrozen} attributes. - * - * @return The maximum age, in seconds, after which data rolls to frozen. - */ - public int getFrozenTimePeriodInSecs() { - return getInteger("frozenTimePeriodInSecs"); - } - - /** - * Returns the absolute path to both hot and warm buckets for this index. - * This value may contain shell expansion terms. - * - * @return This index's absolute path to both hot and warm buckets, or - * {@code null} if not specified. - */ - public String getHomePath() { - return getString("homePath", null); - } - - /** - * Returns the expanded absolute path to both hot and warm buckets for this - * index. - * - * @return The expanded absolute path to both hot and warm buckets, or - * {@code null} if not specified. - */ - public String getHomePathExpanded() { - return getString("homePath_expanded", null); - } - - /** - * Returns the index thread for this index. - * - * @return The index thread. 
- */ - public String getIndexThreads() { - return getString("indexThreads"); - } - - /** - * Returns the last initialization time for this index. - * - * @return The last initialization time, or {@code null} if not specified. - */ - public String getLastInitTime() { - return getString("lastInitTime", null); - } - - /** - * Returns the time that indicates a bucket age. When a warm or cold bucket - * is older than this, Splunk does not create or rebuild its bloomfilter. - * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", - * or "d"). For example, "30d" for 30 days. - * @return String value - */ - public String getMaxBloomBackfillBucketAge() { - return getString("maxBloomBackfillBucketAge", null); - } - - /** - * Returns the maximum number of concurrent optimize processes that - * can run against a hot bucket for this index. - * - * @return The maximum number of concurrent optimize processes. - */ - public int getMaxConcurrentOptimizes() { - return getInteger("maxConcurrentOptimizes"); - } - - /** - * Returns the maximum data size before triggering a roll from hot to warm - * buckets for this index. - * - * @return The maximum data size, in MB, or "auto" (which means 750MB), or - * "auto_high_volume" (which means 10GB on a 64-bit system, or 1GB on a - * 32-bit system). - * @see #setMaxDataSize - */ - public String getMaxDataSize() { - return getString("maxDataSize"); - } - - /** - * Returns the maximum number of hot buckets that can exist for this index. - * - * @return The maximum number of hot buckets or "auto" (which means 3). - */ - public String getMaxHotBuckets() { - return getString("maxHotBuckets"); - } - - /** - * Returns the maximum lifetime of a hot bucket for this index. - * If a hot bucket exceeds this value, Splunk rolls it to warm. - * A value of 0 means an infinite lifetime. - * - * @return The hot bucket's maximum lifetime, in seconds. 
- */ - public int getMaxHotIdleSecs() { - return getInteger("maxHotIdleSecs"); - } - - /** - * Returns the upper bound of the target maximum timespan of - * hot and warm buckets for this index. - * - * @return The upper bound of the target maximum timespan, in seconds. - */ - public int getMaxHotSpanSecs() { - return getInteger("maxHotSpanSecs"); - } - - /** - * Returns the amount of memory to allocate for buffering - * a single .tsidx file into memory before flushing to disk. - * - * @return The amount of memory, in MB. - */ - public int getMaxMemMB() { - return getInteger("maxMemMB"); - } - - /** - * Returns the maximum number of unique lines that are allowed - * in a bucket's .data files for this index. A value of 0 means infinite - * lines. - * - * @return The maximum number of unique lines. - */ - public int getMaxMetaEntries() { - return getInteger("maxMetaEntries"); - } - - /** - * Returns the maximum number of concurrent helper processes for this index. - * - * @return The maximum number of concurrent helper processes. - */ - public int getMaxRunningProcessGroups() { - return getInteger("maxRunningProcessGroups", 0); - } - - /** - * Returns the maximum time attribute for this index. - * - * @return The maximum time attribute, or {@code null} if not specified. - */ - public Date getMaxTime() { - return getDate("maxTime", null); - } - - /** - * Returns the maximum size of this index. If an index - * grows larger than this value, the oldest data is frozen. - * - * @return The maximum index size, in MB. - */ - public int getMaxTotalDataSizeMB() { - return getInteger("maxTotalDataSizeMB"); - } - - /** - * Returns the upper limit, in seconds, for how long an event can sit in a - * raw slice. This value applies only when replication is enabled for this - * index, and is ignored otherwise.<br> - * If there are any acknowledged events sharing this raw slice, the - * {@code MaxTimeUnreplicatedWithAcksparamater} applies instead. 
- * @see #getMaxTimeUnreplicatedWithAcks - * @return int value - */ - public int getMaxTimeUnreplicatedNoAcks() { - return getInteger("maxTimeUnreplicatedNoAcks"); - } - - /** - * Returns the upper limit, in seconds, for how long an event can sit - * unacknowledged in a raw slice. This value only applies when indexer - * acknowledgement is enabled on forwarders and replication is enabled with - * clustering. - * @return int value - */ - public int getMaxTimeUnreplicatedWithAcks() { - return getInteger("maxTimeUnreplicatedWithAcks"); - } - - /** - * Returns the maximum number of warm buckets for this index. If this - * value is exceeded, the warm buckets with the lowest value for their - * latest times are moved to cold. - * - * @return The maximum number of warm buckets. - */ - public int getMaxWarmDBCount() { - return getInteger("maxWarmDBCount"); - } - - /** - * Returns the memory pool for this index. - * - * @return The memory pool, in MB or "auto". - */ - public String getMemPoolMB() { - return getString("memPoolMB"); - } - - /** - * Returns the frequency at which Splunkd forces a filesystem sync while - * compressing journal slices for this index. - * <p> - * A value of "disable" disables this feature completely, while a value of 0 - * forces a file-system sync after completing compression of every journal - * slice. - * - * @return The file-system sync frequency, as an integer or "disable". - */ - public String getMinRawFileSyncSecs() { - return getString("minRawFileSyncSecs"); - } - - /** - * Returns the minimum time attribute for this index. - * - * @return The minimum time attribute, or {@code null} if not specified. - */ - public Date getMinTime() { - return getDate("minTime", null); - } - - /** - * Returns the number of hot buckets that were created for this index. - * - * @return The number of hot buckets. 
- */ - public int getNumHotBuckets() { - return getInteger("numHotBuckets", 0); - } - - /** - * Returns the number of warm buckets created for this index. - * - * @return The number of warm buckets. - */ - public int getNumWarmBuckets() { - return getInteger("numWarmBuckets", 0); - } - - /** - * Returns the number of bloom filters created for this index. - * - * @return The number of bloom filters. - */ - public int getNumBloomfilters() { - return getInteger("numBloomfilters", 0); - } - - /** - * Returns the frequency at which metadata is for partially synced (synced - * in-place) for this index. A value of 0 disables partial syncing, so - * metadata is only synced on the {@code ServiceMetaPeriod} interval. - * @see #getServiceMetaPeriod - * @see #setServiceMetaPeriod - * - * @return The metadata sync interval, in seconds. - */ - public int getPartialServiceMetaPeriod() { - return getInteger("partialServiceMetaPeriod"); - } - - /** - * Returns the future event-time quarantine for this index. Events - * that are newer than now plus this value are quarantined. - * - * @return The future event-time quarantine, in seconds. - */ - public int getQuarantineFutureSecs() { - return getInteger("quarantineFutureSecs"); - } - - /** - * Returns the past event-time quarantine for this index. Events - * that are older than now minus this value are quarantined. - * - * @return The past event-time quarantine, in seconds. - */ - public int getQuarantinePastSecs() { - return getInteger("quarantinePastSecs"); - } - - /** - * Returns the target uncompressed size of individual raw slices in the - * rawdata journal for this index. - * - * @return The target uncompressed size, in bytes. - */ - public int getRawChunkSizeBytes() { - return getInteger("rawChunkSizeBytes"); - } - - /** - * Returns the frequency to check for the need to create a new hot bucket - * and the need to roll or freeze any warm or cold buckets for this index. - * - * @return The check frequency, in seconds. 
- */ - public int getRotatePeriodInSecs() { - return getInteger("rotatePeriodInSecs"); - } - - /** - * Returns the frequency at which metadata is synced to disk for this index. - * - * @return The meta data sync frequency, in seconds. - */ - public int getServiceMetaPeriod() { - return getInteger("serviceMetaPeriod"); - } - - /** - * Returns a list of indexes that suppress "index missing" messages. - * - * @return A comma-separated list of indexes. - */ - public String getSuppressBannerList() { - return getString("suppressBannerList", null); - } - - /** - * Returns the number of events that trigger the indexer to sync events. - * This value is global, not a per-index value. - * - * @return The number of events that trigger the indexer to sync events. - */ - public int getSync() { - return getInteger("sync"); - } - - /** - * Indicates whether the sync operation is called before the file - * descriptor is closed on metadata updates. - * - * @return {@code true} if the sync operation is called before the file - * descriptor is closed on metadata updates, {@code false} if not. - */ - public boolean getSyncMeta() { - return getBoolean("syncMeta"); - } - - /** - * Returns the absolute path to the thawed index for this index. This value - * may contain shell expansion terms. - * - * @return The absolute path to the thawed index, or {@code null} if not - * specified. - */ - public String getThawedPath() { - return getString("thawedPath", null); - } - - /** - * Returns the expanded absolute path to the thawed index for this index. - * - * @return The expanded absolute path to the thawed index, or {@code null} - * if not specified. - */ - public String getThawedPathExpanded() { - return getString("thawedPath_expanded", null); - } - - /** - * Returns the frequency at which Splunk checks for an index throttling - * condition. - * - * @return The frequency of the throttling check, in seconds. 
- */ - public int getThrottleCheckPeriod() { - return getInteger("throttleCheckPeriod"); - } - - /** - * Returns the total event count for this index. - * - * @return The total event count. - */ - public int getTotalEventCount() { - return getInteger("totalEventCount"); - } - - /** - * Indicates whether this index is an internal index. - * - * @return {@code true} if this index is an internal index, {@code false} - * if not. - */ - public boolean isInternal() { - return getBoolean("isInternal"); - } - - /** - * Performs rolling hot buckets for this index. - */ - public void rollHotBuckets() { - ResponseMessage response = service.post(path + "/roll-hot-buckets"); - assert(response.getStatus() == 200); - } - - /** - * Sets whether the data retrieved from this index is UTF8-encoded. - * <p> - * <b>Note:</b> Indexing performance degrades when this parameter is set to - * {@code true}. - * - * In Splunk 5.0 and later, this is a global property and cannot be set on - * a per-index basis. - * - * @param assure {@code true} to ensure UTF8 encoding, {@code false} if not. - */ - public void setAssureUTF8(boolean assure) { - setCacheValue("assureUTF8", assure); - } - - /** - * Sets the number of events that make up a block for block signatures. A - * value of 100 is recommended. A value of 0 disables block signing for this - * index. - * - * @param value The event count for block signing. - */ - public void setBlockSignSize(int value) { - setCacheValue("blockSignSize", value); - } - - - /** - * Sets the suggested size of the .tsidx file for the bucket rebuild - * process. - * - * Valid values are: "auto", a positive integer, or a positive - * integer followed by "KB", "MB", or "GB". - * - * @param value The suggested size of the .tsidx file for the bucket rebuild - * process. 
- */ - public void setBucketRebuildMemoryHint(String value) { - setCacheValue("bucketRebuildMemoryHint", value); - } - - /** - * Sets the destination path for the frozen archive, where Splunk - * automatically puts frozen buckets. The bucket freezing policy is as - * follows: - * <ul><li><b>New-style buckets (4.2 and later):</b> All files are removed - * except the raw data. To thaw frozen buckets, run {@code Splunk rebuild - * <bucket dir>} on the bucket, then move the buckets to the thawed - * directory.</li> - * <li><b>Old-style buckets (4.1 and earlier):</b> gzip all the .data and - * .tsidx files. To thaw frozen buckets, gunzip the zipped files and move - * the buckets to the thawed directory.</li></ul> - * If both {@code coldToFrozenDir} and {@code coldToFrozenScript} are - * specified, {@code coldToFrozenDir} takes precedence. - * @see #setColdToFrozenScript - * @see #getColdToFrozenScript - * - * @param destination The destination path for the frozen archive. - */ - public void setColdToFrozenDir(String destination) { - setCacheValue("coldToFrozenDir", destination); - } - - /** - * Sets the path to the archiving script. - * <p>For more info about archiving scripts, see the - * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" - * target="_blank">POST data/indexes endpoint</a> in the REST API - * documentation. - * @see #setColdToFrozenDir - * @see #getColdToFrozenDir - * - * @param script The path to the archiving script. - */ - public void setColdToFrozenScript(String script) { - setCacheValue("coldToFrozenScript", script); - } - - /** - * Sets whether asynchronous "online fsck" bucket repair is enabled. - * <p> - * When this feature is enabled, you don't have to wait for buckets to be - * repaired before starting Splunk, but you might notice a slight - * degradation in performance as a result. - * - * @param value {@code true} to enable online bucket repair, {@code false} - * if not. 
- */ - public void setEnableOnlineBucketRepair(boolean value) { - setCacheValue("enableOnlineBucketRepair", value); - } - - /** - * Sets the maximum age for a bucket, after which the data in this index - * rolls to frozen. Freezing data removes it from the index. To archive - * data, see {@code coldToFrozenDir} and {@code coldToFrozenScript}. - * @see #setColdToFrozenDir - * @see #setColdToFrozenScript - * - * @param seconds The time, in seconds, after which indexed data rolls to - * frozen. - */ - public void setFrozenTimePeriodInSecs(int seconds) { - setCacheValue("frozenTimePeriodInSecs", seconds); - } - - /** - * Sets the time that indicates a bucket age. When a warm or cold bucket - * is older than this, Splunk does not create or rebuild its bloomfilter. - * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", - * or "d"). For example, "30d" for 30 days. - * @param time The time that indicates a bucket age. - */ - public void setMaxBloomBackfillBucketAge(String time) { - setCacheValue("maxBloomBackfillBucketAge", time); - } - - /** - * Sets the number of concurrent optimize processes that can run against - * a hot bucket for this index. - * - * @param processes The number of concurrent optimize processes. - */ - public void setMaxConcurrentOptimizes(int processes) { - setCacheValue("maxConcurrentOptimizes", processes); - } - - /** - * Sets the maximum data size before triggering a roll from hot to warm - * buckets for this index. You can also specify a value to let Splunk - * autotune this parameter: use "auto_high_volume" for high-volume indexes - * (such as the main index, or one that gets over 10GB of data per day); - * otherwise, use "auto". - * @see #getMaxDataSize - * - * @param size The size in MB, or an autotune string. - */ - public void setMaxDataSize(String size) { - setCacheValue("maxDataSize", size); - } - - /** - * Sets the maximum number of hot buckets that can exist per index. 
- * <p> - * When {@code maxHotBuckets} is exceeded, Splunk rolls the least recently - * used (LRU) hot bucket to warm. Both normal hot buckets and quarantined - * hot buckets count towards this total. This setting operates independently - * of {@code MaxHotIdleSecs}, which can also cause hot buckets to roll. - * @see #setMaxHotIdleSecs - * @see #getMaxHotIdleSecs - * - * @param size The maximum number of hot buckets per index, or an 'auto' string. - */ - public void setMaxHotBuckets(String size) { - setCacheValue("maxHotBuckets", size); - } - - /** - * Sets the maximum lifetime of a hot bucket for this index. - * <p> - * If a hot bucket exceeds this value, Splunk rolls it to warm. - * This setting operates independently of {@code MaxHotBuckets}, which can - * also cause hot buckets to roll. - * @see #setMaxHotBuckets - * @see #getMaxHotBuckets - * - * @param seconds The hot bucket's maximum lifetime, in seconds. A value of - * 0 means an infinite lifetime. - */ - public void setMaxHotIdleSecs(int seconds) { - setCacheValue("maxHotIdleSecs", seconds); - } - - /** - * Sets the upper bound of the target maximum timespan of hot and warm - * buckets for this index. - * <p> - * <b>Note:</b> If you set this too small, you can get an explosion of - * hot and warm buckets in the file system. The system sets a lower bound - * implicitly for this parameter at 3600, but this advanced parameter should - * be set with care and understanding of the characteristics of your data. - * - * @param seconds The upper bound of the target maximum timespan, in - * seconds. - */ - public void setMaxHotSpanSecs(int seconds) { - setCacheValue("maxHotSpanSecs", seconds); - } - - /** - * Sets the amount of memory allocated for buffering a single .tsidx - * file before flushing to disk. - * - * @param memory The amount of memory, in MB. 
- */ - public void setMaxMemMB(int memory) { - setCacheValue("maxMemMB", memory); - } - - /** - * Sets the maximum number of unique lines in .data files in a bucket, which - * may help to reduce memory consumption. - * <p> - * If this value is exceeded, a hot bucket is rolled to prevent a further - * increase. If your buckets are rolling due to Strings.data hitting this - * limit, the culprit might be the "punct" field in your data. If you don't - * use that field, it might be better to just disable this (see the - * props.conf.spec in $SPLUNK_HOME/etc/system/README). - * - * @param entries The maximum number of unique lines. A value of 0 means - * infinite lines. - */ - public void setMaxMetaEntries(int entries) { - setCacheValue("maxMetaEntries", entries); - } - - - /** - * Sets the upper limit for how long an event can sit in a - * raw slice. This value applies only when replication is enabled for this - * index, and is ignored otherwise.<br> - * If there are any acknowledged events sharing this raw slice, the - * {@code MaxTimeUnreplicatedWithAcksparamater} applies instead. - * - * @param value The upper limit, in seconds. A value of 0 disables this - * setting. - */ - public void setMaxTimeUnreplicatedNoAcks(int value) { - setCacheValue("maxTimeUnreplicatedNoAcks", value); - } - - /** - * Sets the upper limit for how long an event can sit unacknowledged in a - * raw slice. This value only applies when indexer acknowledgement is - * enabled on forwarders and replication is enabled with clustering. - * <p> - * This number should not exceed the acknowledgement timeout configured on - * any forwarder. - * - * @param value The upper limit, in seconds. A value of 0 disables this - * setting (not recommended). - */ - public void setMaxTimeUnreplicatedWithAcks(int value) { - setCacheValue("maxTimeUnreplicatedWithAcks", value); - } - - /** - * Sets the maximum size for this index. If an index grows larger than this - * value, the oldest data is frozen. 
- * - * @param size The maximum index size, in MB. - */ - public void setMaxTotalDataSizeMB(int size) { - setCacheValue("maxTotalDataSizeMB", size); - } - - /** - * Sets the maximum number of warm buckets. If this number is exceeded, - * the warm buckets with the lowest value for their latest times will be - * moved to cold. - * - * @param buckets The maximum number of warm buckets. - */ - public void setMaxWarmDBCount(int buckets) { - setCacheValue("maxWarmDBCount", buckets); - } - - /** - * Sets the frequency at which Splunkd forces a file system sync while - * compressing journal slices for this index. A value of "disable" disables - * this feature completely, while a value of 0 forces a file-system sync - * after completing compression of every journal slice. - * - * @param frequency The file-system sync frequency, as an integer or - * "disable". - */ - public void setMinRawFileSyncSecs(String frequency) { - setCacheValue("minRawFileSyncSecs", frequency); - } - - /** - * Sets the frequency at which metadata is for partially synced (synced - * in-place) for this index. A value of 0 disables partial syncing, so - * metadata is only synced on the {@code ServiceMetaPeriod} interval. - * @see #setServiceMetaPeriod - * @see #getServiceMetaPeriod - * - * @param frequency The metadata sync interval, in seconds. - */ - public void setPartialServiceMetaPeriod(int frequency) { - setCacheValue("partialServiceMetaPeriod", frequency); - } - - /** - * Sets a quarantine for events that are timestamped in the future to help - * prevent main hot buckets from being polluted with fringe events. Events - * that are newer than "now" plus this value are quarantined. - * - * @param window The future event-time quarantine, in seconds. 
- */ - public void setQuarantineFutureSecs(int window) { - setCacheValue("quarantineFutureSecs", window); - } - - /** - * Sets a quarantine for events that are timestamped in the past to help - * prevent main hot buckets from being polluted with fringe events. Events - * that are older than "now" plus this value are quarantined. - * - * @param window The past event-time quarantine, in seconds. - */ - public void setQuarantinePastSecs(int window) { - setCacheValue("quarantinePastSecs", window); - } - - /** - * Sets the target uncompressed size of individual raw slices in the rawdata - * journal for this index. - * <p> - * This parameter only specifies a target chunk size. The actual chunk size - * might be slightly larger by an amount proportional to an individual event - * size. - * <blockquote> - * <b>WARNING:</b> This is an advanced parameter. Only change it if you are - * instructed to do so by Splunk Support. - * </blockquote> - * @param size The target uncompressed size, in bytes. (0 is not a valid - * value--if 0 is used, this parameter is set to the default value.) - */ - public void setRawChunkSizeBytes(int size) { - setCacheValue("rawChunkSizeBytes", size); - } - - /** - * Sets the frequency to check for the need to create a new hot bucket and - * the need to roll or freeze any warm or cold buckets for this index. - * - * @param frequency The check frequency, in seconds. - */ - public void setRotatePeriodInSecs(int frequency) { - setCacheValue("rotatePeriodInSecs", frequency); - } - - /** - * Sets the frequency at which metadata is synced to disk for this index. - * - * @param frequency The meta data sync frequency, in seconds. - */ - public void setServiceMetaPeriod(int frequency) { - setCacheValue("serviceMetaPeriod", frequency); - } - - /** - * Sets whether the sync operation is called before the file descriptor is - * closed on metadata updates. 
- * <p> - * This functionality improves the integrity of metadata files, especially - * with regard to operating system crashes and machine failures. - * <blockquote> - * <b>WARNING:</b> This is an advanced parameter. Only change it if you are - * instructed to do so by Splunk Support. - * </blockquote> - * @param sync {@code true} to call the sync operation before the file - * descriptor is closed on metadata updates, {@code false} if not. - */ - public void setSyncMeta(boolean sync) { - setCacheValue("syncMeta", sync); - } - - /** - * Sets the frequency at which Splunk checks for an index throttling - * condition. - * - * @param frequency The frequency of the throttling check, in seconds. - */ - public void setThrottleCheckPeriod(int frequency) { - setCacheValue("throttleCheckPeriod", frequency); - } - - /** - * Submits an event to this index through an HTTP POST request. - * - * @param data The event data to post. - */ - public void submit(String data) { - Receiver receiver = service.getReceiver(); - receiver.submit(getName(), data); - } - - /** - * Submits an event to this index through an HTTP POST request. - * - * @param args Optional arguments for this request. Valid parameters are: - * "host", "host_regex", "source", and "sourcetype". - * @param data The event data to post. - */ - public void submit(Args args, String data) { - Receiver receiver = service.getReceiver(); - receiver.submit(getName(), args, data); - } - - /** - * Uploads a file to this index as an event stream. - * <p> - * <b>Note:</b> This file must be directly accessible by the Splunk server. - * - * @param filename The path and filename. - */ - public void upload(String filename) { - EntityCollection<Upload> uploads = service.getUploads(); - Args args = new Args("index", getName()); - uploads.create(filename, args); - } - - /** - * Uploads a file to this index as an event stream. - * <p> - * <b>Note:</b> This file must be directly accessible by the Splunk server. 
- * - * @param filename The path and filename. - * - * @param args Optional arguments for this request. Valid parameters are: - * "host", "sourcetype", "rename-source". More found at: - * http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTinput#data.2Finputs.2Foneshot - * - */ - public void upload(String filename, Args args) { - EntityCollection<Upload> uploads = service.getUploads(); - if(args.containsKey("index")){ - throw new IllegalArgumentException("The 'index' parameter cannot be passed to an index's oneshot upload."); - } - args.add("index", getName()); - uploads.create(filename, args); - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.Socket; +import java.util.Date; + +/** + * The {@code Index} class represents an index. + */ +public class Index extends Entity { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The index endpoint. + */ + Index(Service service, String path) { + super(service, path); + } + + /** + * Creates a writable socket to this index. + * + * @return The writable socket. + * @throws IOException Throws exception if fails to write socket. 
+ */ + public Socket attach() throws IOException { + Receiver receiver = service.getReceiver(); + return receiver.attach(getName()); + } + + /** + * Writes events to this index, reusing the connection. + * This method passes an output stream connected to the index to the + * {@code run} method of the {@code ReceiverBehavior} object, then handles + * setting up and tearing down the socket. + * <p> + * For an example of how to use this method, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to + * get data into Splunk</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * + * @param behavior The body of a {@code try} block as an anonymous + * implementation of the {@code ReceiverBehavior} interface. + * @throws IOException The IOException class + */ + public void attachWith(ReceiverBehavior behavior) throws IOException { + try (Socket socket = attach(); + OutputStream output = socket.getOutputStream();) { + behavior.run(output); + output.flush(); + } + } + + /** + * Creates a writable socket to this index. + * + * @param args Optional arguments for this stream. Valid parameters are: + * "host", "host_regex", "source", and "sourcetype". + * @return The socket. + * @throws IOException The IOException class + */ + public Socket attach(Args args) throws IOException { + Receiver receiver = service.getReceiver(); + return receiver.attach(getName(), args); + } + + /** + * Cleans this index, which removes all events from it. + * + * @param maxSeconds The maximum number of seconds to wait before returning. + * A value of -1 means to wait forever. + * @throws SplunkException If cleaning timed out or + * if the thread was interrupted. + * @return This index. 
+ */ + public Index clean(int maxSeconds) { + Args saved = new Args(); + saved.put("maxTotalDataSizeMB", getMaxTotalDataSizeMB()); + saved.put("frozenTimePeriodInSecs", getFrozenTimePeriodInSecs()); + try { + Args reset = new Args(); + reset.put("maxTotalDataSizeMB", "1"); + reset.put("frozenTimePeriodInSecs", "1"); + update(reset); + rollHotBuckets(); + + long startTime = System.currentTimeMillis(); + long endTime = startTime + (maxSeconds * 1000); + while (true) { + long timeLeft = endTime - System.currentTimeMillis(); + if (timeLeft <= 0) { + break; + } + Thread.sleep(Math.min(1000, timeLeft)); + + if (this.getTotalEventCount() == 0) { + return this; + } + refresh(); + } + + throw new SplunkException(SplunkException.TIMEOUT, + "Index cleaning timed out"); + } + catch (InterruptedException e) + { + SplunkException f = new SplunkException( + SplunkException.INTERRUPTED, + "Index cleaning interrupted."); + f.initCause(e); + throw f; + } + finally { + update(saved); + } + } + + /** + * Indicates whether the data retrieved from this index has been + * UTF8-encoded. + * + * @return {@code true} if the retrieved data is in UTF8, {@code false} if + * not. + */ + public boolean getAssureUTF8() { + return getBoolean("assureUTF8"); + } + + /** + * Returns the total size of all bloom filter files. + * + * @return The total size of all bloom filter files, in KB. + */ + public int getBloomfilterTotalSizeKB() { + return getInteger("bloomfilterTotalSizeKB", 0); + } + + /** + * Returns the suggested size of the .tsidx file for the bucket rebuild + * process. + * Valid values are: "auto", a positive integer, or a positive + * integer followed by "KB", "MB", or "GB". + * + * @return The suggested size of the .tsidx file for the bucket rebuild + * process. + */ + public String getBucketRebuildMemoryHint() { + return getString("bucketRebuildMemoryHint"); + } + + /** + * Returns the absolute file path to the cold database for this index. 
+ * This value may contain shell expansion terms. + * + * @return The absolute file path to the cold database, or {@code null} if + * not specified. + */ + public String getColdPath() { + return getString("coldPath", null); + } + + /** + * Returns the expanded absolute file path to the cold database for this + * index. + * + * @return The expanded absolute file path to the cold database, or + * {@code null} if not specified. + */ + public String getColdPathExpanded() { + return getString("coldPath_expanded", null); + } + + /** + * Returns the frozen archive destination path for this index. + * + * @return The frozen archive destination path, or {@code null} if not + * specified. + */ + public String getColdToFrozenDir() { + return getString("coldToFrozenDir", null); + } + + /** + * Returns the path to the archiving script. + * <p>For more info about archiving scripts, see the + * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" + * target="_blank">POST data/indexes endpoint</a> in the REST API + * documentation. + * @see #getColdToFrozenDir + * + * @return The archiving script, or {@code null} if not specified. + */ + public String getColdToFrozenScript() { + return getString("coldToFrozenScript", null); + } + + /** + * Indicates whether raw data is compressed. + * + * @deprecated Splunk always compresses raw data. + * @return {@code true} if raw data is compressed, {@code false} if not. + */ + public boolean getCompressRawdata() { + return getBoolean("compressRawdata"); + } + + /** + * Returns the current size of this index. + * + * @return The current size of the index, in MB. + */ + public int getCurrentDBSizeMB() { + return getInteger("currentDBSizeMB"); + } + + /** + * Return the default index name of the Splunk instance. + * + * @return The default index name. 
+ */ + public String getDefaultDatabase() { + return getString("defaultDatabase"); + } + + /** + * Returns whether asynchronous "online fsck" bucket repair is enabled. + * <p> + * When this feature is enabled, you don't have to wait for buckets to be + * repaired before starting Splunk, but you might notice a slight + * degradation in performance as a result. + * @return {@code true} if bucket repair is enabled, {@code false} if + * not. + */ + public boolean getEnableOnlineBucketRepair() { + + return getBoolean("enableOnlineBucketRepair"); + } + + /** + * Indicates whether real-time search is enabled for this index. + * + * @return {@code true} if real-time search is enabled, {@code false} if + * not. + */ + public boolean getEnableRealtimeSearch() { + return getBoolean("enableRealtimeSearch"); + } + + /** + * Returns the maximum age for a bucket, after which the data in this index + * rolls to frozen. If archiving is necessary for frozen data, see the + * {@code coldToFrozen} attributes. + * + * @return The maximum age, in seconds, after which data rolls to frozen. + */ + public int getFrozenTimePeriodInSecs() { + return getInteger("frozenTimePeriodInSecs"); + } + + /** + * Returns the absolute path to both hot and warm buckets for this index. + * This value may contain shell expansion terms. + * + * @return This index's absolute path to both hot and warm buckets, or + * {@code null} if not specified. + */ + public String getHomePath() { + return getString("homePath", null); + } + + /** + * Returns the expanded absolute path to both hot and warm buckets for this + * index. + * + * @return The expanded absolute path to both hot and warm buckets, or + * {@code null} if not specified. + */ + public String getHomePathExpanded() { + return getString("homePath_expanded", null); + } + + /** + * Returns the index thread for this index. + * + * @return The index thread. 
+ */ + public String getIndexThreads() { + return getString("indexThreads"); + } + + /** + * Returns the last initialization time for this index. + * + * @return The last initialization time, or {@code null} if not specified. + */ + public String getLastInitTime() { + return getString("lastInitTime", null); + } + + /** + * Returns the time that indicates a bucket age. When a warm or cold bucket + * is older than this, Splunk does not create or rebuild its bloomfilter. + * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", + * or "d"). For example, "30d" for 30 days. + * @return String value + */ + public String getMaxBloomBackfillBucketAge() { + return getString("maxBloomBackfillBucketAge", null); + } + + /** + * Returns the maximum number of concurrent optimize processes that + * can run against a hot bucket for this index. + * + * @return The maximum number of concurrent optimize processes. + */ + public int getMaxConcurrentOptimizes() { + return getInteger("maxConcurrentOptimizes"); + } + + /** + * Returns the maximum data size before triggering a roll from hot to warm + * buckets for this index. + * + * @return The maximum data size, in MB, or "auto" (which means 750MB), or + * "auto_high_volume" (which means 10GB on a 64-bit system, or 1GB on a + * 32-bit system). + * @see #setMaxDataSize + */ + public String getMaxDataSize() { + return getString("maxDataSize"); + } + + /** + * Returns the maximum number of hot buckets that can exist for this index. + * + * @return The maximum number of hot buckets or "auto" (which means 3). + */ + public String getMaxHotBuckets() { + return getString("maxHotBuckets"); + } + + /** + * Returns the maximum lifetime of a hot bucket for this index. + * If a hot bucket exceeds this value, Splunk rolls it to warm. + * A value of 0 means an infinite lifetime. + * + * @return The hot bucket's maximum lifetime, in seconds. 
+ */
+ public int getMaxHotIdleSecs() {
+ return getInteger("maxHotIdleSecs");
+ }
+
+ /**
+ * Returns the upper bound of the target maximum timespan of
+ * hot and warm buckets for this index.
+ *
+ * @return The upper bound of the target maximum timespan, in seconds.
+ */
+ public int getMaxHotSpanSecs() {
+ return getInteger("maxHotSpanSecs");
+ }
+
+ /**
+ * Returns the amount of memory to allocate for buffering
+ * a single .tsidx file into memory before flushing to disk.
+ *
+ * @return The amount of memory, in MB.
+ */
+ public int getMaxMemMB() {
+ return getInteger("maxMemMB");
+ }
+
+ /**
+ * Returns the maximum number of unique lines that are allowed
+ * in a bucket's .data files for this index. A value of 0 means infinite
+ * lines.
+ *
+ * @return The maximum number of unique lines.
+ */
+ public int getMaxMetaEntries() {
+ return getInteger("maxMetaEntries");
+ }
+
+ /**
+ * Returns the maximum number of concurrent helper processes for this index.
+ *
+ * @return The maximum number of concurrent helper processes.
+ */
+ public int getMaxRunningProcessGroups() {
+ return getInteger("maxRunningProcessGroups", 0);
+ }
+
+ /**
+ * Returns the maximum time attribute for this index.
+ *
+ * @return The maximum time attribute, or {@code null} if not specified.
+ */
+ public Date getMaxTime() {
+ return getDate("maxTime", null);
+ }
+
+ /**
+ * Returns the maximum size of this index. If an index
+ * grows larger than this value, the oldest data is frozen.
+ *
+ * @return The maximum index size, in MB.
+ */
+ public int getMaxTotalDataSizeMB() {
+ return getInteger("maxTotalDataSizeMB");
+ }
+
+ /**
+ * Returns the upper limit, in seconds, for how long an event can sit in a
+ * raw slice. This value applies only when replication is enabled for this
+ * index, and is ignored otherwise.<br>
+ * If there are any acknowledged events sharing this raw slice, the
+ * {@code maxTimeUnreplicatedWithAcks} parameter applies instead.
+ * @see #getMaxTimeUnreplicatedWithAcks + * @return int value + */ + public int getMaxTimeUnreplicatedNoAcks() { + return getInteger("maxTimeUnreplicatedNoAcks"); + } + + /** + * Returns the upper limit, in seconds, for how long an event can sit + * unacknowledged in a raw slice. This value only applies when indexer + * acknowledgement is enabled on forwarders and replication is enabled with + * clustering. + * @return int value + */ + public int getMaxTimeUnreplicatedWithAcks() { + return getInteger("maxTimeUnreplicatedWithAcks"); + } + + /** + * Returns the maximum number of warm buckets for this index. If this + * value is exceeded, the warm buckets with the lowest value for their + * latest times are moved to cold. + * + * @return The maximum number of warm buckets. + */ + public int getMaxWarmDBCount() { + return getInteger("maxWarmDBCount"); + } + + /** + * Returns the memory pool for this index. + * + * @return The memory pool, in MB or "auto". + */ + public String getMemPoolMB() { + return getString("memPoolMB"); + } + + /** + * Returns the frequency at which Splunkd forces a filesystem sync while + * compressing journal slices for this index. + * <p> + * A value of "disable" disables this feature completely, while a value of 0 + * forces a file-system sync after completing compression of every journal + * slice. + * + * @return The file-system sync frequency, as an integer or "disable". + */ + public String getMinRawFileSyncSecs() { + return getString("minRawFileSyncSecs"); + } + + /** + * Returns the minimum time attribute for this index. + * + * @return The minimum time attribute, or {@code null} if not specified. + */ + public Date getMinTime() { + return getDate("minTime", null); + } + + /** + * Returns the number of hot buckets that were created for this index. + * + * @return The number of hot buckets. 
+ */
+ public int getNumHotBuckets() {
+ return getInteger("numHotBuckets", 0);
+ }
+
+ /**
+ * Returns the number of warm buckets created for this index.
+ *
+ * @return The number of warm buckets.
+ */
+ public int getNumWarmBuckets() {
+ return getInteger("numWarmBuckets", 0);
+ }
+
+ /**
+ * Returns the number of bloom filters created for this index.
+ *
+ * @return The number of bloom filters.
+ */
+ public int getNumBloomfilters() {
+ return getInteger("numBloomfilters", 0);
+ }
+
+ /**
+ * Returns the frequency at which metadata is partially synced (synced
+ * in-place) for this index. A value of 0 disables partial syncing, so
+ * metadata is only synced on the {@code ServiceMetaPeriod} interval.
+ * @see #getServiceMetaPeriod
+ * @see #setServiceMetaPeriod
+ *
+ * @return The metadata sync interval, in seconds.
+ */
+ public int getPartialServiceMetaPeriod() {
+ return getInteger("partialServiceMetaPeriod");
+ }
+
+ /**
+ * Returns the future event-time quarantine for this index. Events
+ * that are newer than now plus this value are quarantined.
+ *
+ * @return The future event-time quarantine, in seconds.
+ */
+ public int getQuarantineFutureSecs() {
+ return getInteger("quarantineFutureSecs");
+ }
+
+ /**
+ * Returns the past event-time quarantine for this index. Events
+ * that are older than now minus this value are quarantined.
+ *
+ * @return The past event-time quarantine, in seconds.
+ */
+ public int getQuarantinePastSecs() {
+ return getInteger("quarantinePastSecs");
+ }
+
+ /**
+ * Returns the target uncompressed size of individual raw slices in the
+ * rawdata journal for this index.
+ *
+ * @return The target uncompressed size, in bytes.
+ */
+ public int getRawChunkSizeBytes() {
+ return getInteger("rawChunkSizeBytes");
+ }
+
+ /**
+ * Returns the frequency to check for the need to create a new hot bucket
+ * and the need to roll or freeze any warm or cold buckets for this index.
+ *
+ * @return The check frequency, in seconds.
+ */ + public int getRotatePeriodInSecs() { + return getInteger("rotatePeriodInSecs"); + } + + /** + * Returns the frequency at which metadata is synced to disk for this index. + * + * @return The meta data sync frequency, in seconds. + */ + public int getServiceMetaPeriod() { + return getInteger("serviceMetaPeriod"); + } + + /** + * Returns a list of indexes that suppress "index missing" messages. + * + * @return A comma-separated list of indexes. + */ + public String getSuppressBannerList() { + return getString("suppressBannerList", null); + } + + /** + * Returns the number of events that trigger the indexer to sync events. + * This value is global, not a per-index value. + * + * @return The number of events that trigger the indexer to sync events. + */ + public int getSync() { + return getInteger("sync"); + } + + /** + * Indicates whether the sync operation is called before the file + * descriptor is closed on metadata updates. + * + * @return {@code true} if the sync operation is called before the file + * descriptor is closed on metadata updates, {@code false} if not. + */ + public boolean getSyncMeta() { + return getBoolean("syncMeta"); + } + + /** + * Returns the absolute path to the thawed index for this index. This value + * may contain shell expansion terms. + * + * @return The absolute path to the thawed index, or {@code null} if not + * specified. + */ + public String getThawedPath() { + return getString("thawedPath", null); + } + + /** + * Returns the expanded absolute path to the thawed index for this index. + * + * @return The expanded absolute path to the thawed index, or {@code null} + * if not specified. + */ + public String getThawedPathExpanded() { + return getString("thawedPath_expanded", null); + } + + /** + * Returns the frequency at which Splunk checks for an index throttling + * condition. + * + * @return The frequency of the throttling check, in seconds. 
+ */ + public int getThrottleCheckPeriod() { + return getInteger("throttleCheckPeriod"); + } + + /** + * Returns the total event count for this index. + * + * @return The total event count. + */ + public int getTotalEventCount() { + return getInteger("totalEventCount"); + } + + /** + * Indicates whether this index is an internal index. + * + * @return {@code true} if this index is an internal index, {@code false} + * if not. + */ + public boolean isInternal() { + return getBoolean("isInternal"); + } + + /** + * Performs rolling hot buckets for this index. + */ + public void rollHotBuckets() { + ResponseMessage response = service.post(path + "/roll-hot-buckets"); + assert(response.getStatus() == 200); + } + + /** + * Sets whether the data retrieved from this index is UTF8-encoded. + * <p> + * <b>Note:</b> Indexing performance degrades when this parameter is set to + * {@code true}. + * + * In Splunk 5.0 and later, this is a global property and cannot be set on + * a per-index basis. + * + * @param assure {@code true} to ensure UTF8 encoding, {@code false} if not. + */ + public void setAssureUTF8(boolean assure) { + setCacheValue("assureUTF8", assure); + } + + /** + * Sets the number of events that make up a block for block signatures. A + * value of 100 is recommended. A value of 0 disables block signing for this + * index. + * + * @param value The event count for block signing. + */ + public void setBlockSignSize(int value) { + setCacheValue("blockSignSize", value); + } + + + /** + * Sets the suggested size of the .tsidx file for the bucket rebuild + * process. + * + * Valid values are: "auto", a positive integer, or a positive + * integer followed by "KB", "MB", or "GB". + * + * @param value The suggested size of the .tsidx file for the bucket rebuild + * process. 
+ */ + public void setBucketRebuildMemoryHint(String value) { + setCacheValue("bucketRebuildMemoryHint", value); + } + + /** + * Sets the destination path for the frozen archive, where Splunk + * automatically puts frozen buckets. The bucket freezing policy is as + * follows: + * <ul><li><b>New-style buckets (4.2 and later):</b> All files are removed + * except the raw data. To thaw frozen buckets, run {@code Splunk rebuild + * <bucket dir>} on the bucket, then move the buckets to the thawed + * directory.</li> + * <li><b>Old-style buckets (4.1 and earlier):</b> gzip all the .data and + * .tsidx files. To thaw frozen buckets, gunzip the zipped files and move + * the buckets to the thawed directory.</li></ul> + * If both {@code coldToFrozenDir} and {@code coldToFrozenScript} are + * specified, {@code coldToFrozenDir} takes precedence. + * @see #setColdToFrozenScript + * @see #getColdToFrozenScript + * + * @param destination The destination path for the frozen archive. + */ + public void setColdToFrozenDir(String destination) { + setCacheValue("coldToFrozenDir", destination); + } + + /** + * Sets the path to the archiving script. + * <p>For more info about archiving scripts, see the + * <a href="http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTindex#POST_data.2Findexes" + * target="_blank">POST data/indexes endpoint</a> in the REST API + * documentation. + * @see #setColdToFrozenDir + * @see #getColdToFrozenDir + * + * @param script The path to the archiving script. + */ + public void setColdToFrozenScript(String script) { + setCacheValue("coldToFrozenScript", script); + } + + /** + * Sets whether asynchronous "online fsck" bucket repair is enabled. + * <p> + * When this feature is enabled, you don't have to wait for buckets to be + * repaired before starting Splunk, but you might notice a slight + * degradation in performance as a result. + * + * @param value {@code true} to enable online bucket repair, {@code false} + * if not. 
+ */ + public void setEnableOnlineBucketRepair(boolean value) { + setCacheValue("enableOnlineBucketRepair", value); + } + + /** + * Sets the maximum age for a bucket, after which the data in this index + * rolls to frozen. Freezing data removes it from the index. To archive + * data, see {@code coldToFrozenDir} and {@code coldToFrozenScript}. + * @see #setColdToFrozenDir + * @see #setColdToFrozenScript + * + * @param seconds The time, in seconds, after which indexed data rolls to + * frozen. + */ + public void setFrozenTimePeriodInSecs(int seconds) { + setCacheValue("frozenTimePeriodInSecs", seconds); + } + + /** + * Sets the time that indicates a bucket age. When a warm or cold bucket + * is older than this, Splunk does not create or rebuild its bloomfilter. + * The valid format is <i>number</i> followed by a time unit ("s", "m", "h", + * or "d"). For example, "30d" for 30 days. + * @param time The time that indicates a bucket age. + */ + public void setMaxBloomBackfillBucketAge(String time) { + setCacheValue("maxBloomBackfillBucketAge", time); + } + + /** + * Sets the number of concurrent optimize processes that can run against + * a hot bucket for this index. + * + * @param processes The number of concurrent optimize processes. + */ + public void setMaxConcurrentOptimizes(int processes) { + setCacheValue("maxConcurrentOptimizes", processes); + } + + /** + * Sets the maximum data size before triggering a roll from hot to warm + * buckets for this index. You can also specify a value to let Splunk + * autotune this parameter: use "auto_high_volume" for high-volume indexes + * (such as the main index, or one that gets over 10GB of data per day); + * otherwise, use "auto". + * @see #getMaxDataSize + * + * @param size The size in MB, or an autotune string. + */ + public void setMaxDataSize(String size) { + setCacheValue("maxDataSize", size); + } + + /** + * Sets the maximum number of hot buckets that can exist per index. 
+ * <p> + * When {@code maxHotBuckets} is exceeded, Splunk rolls the least recently + * used (LRU) hot bucket to warm. Both normal hot buckets and quarantined + * hot buckets count towards this total. This setting operates independently + * of {@code MaxHotIdleSecs}, which can also cause hot buckets to roll. + * @see #setMaxHotIdleSecs + * @see #getMaxHotIdleSecs + * + * @param size The maximum number of hot buckets per index, or an 'auto' string. + */ + public void setMaxHotBuckets(String size) { + setCacheValue("maxHotBuckets", size); + } + + /** + * Sets the maximum lifetime of a hot bucket for this index. + * <p> + * If a hot bucket exceeds this value, Splunk rolls it to warm. + * This setting operates independently of {@code MaxHotBuckets}, which can + * also cause hot buckets to roll. + * @see #setMaxHotBuckets + * @see #getMaxHotBuckets + * + * @param seconds The hot bucket's maximum lifetime, in seconds. A value of + * 0 means an infinite lifetime. + */ + public void setMaxHotIdleSecs(int seconds) { + setCacheValue("maxHotIdleSecs", seconds); + } + + /** + * Sets the upper bound of the target maximum timespan of hot and warm + * buckets for this index. + * <p> + * <b>Note:</b> If you set this too small, you can get an explosion of + * hot and warm buckets in the file system. The system sets a lower bound + * implicitly for this parameter at 3600, but this advanced parameter should + * be set with care and understanding of the characteristics of your data. + * + * @param seconds The upper bound of the target maximum timespan, in + * seconds. + */ + public void setMaxHotSpanSecs(int seconds) { + setCacheValue("maxHotSpanSecs", seconds); + } + + /** + * Sets the amount of memory allocated for buffering a single .tsidx + * file before flushing to disk. + * + * @param memory The amount of memory, in MB. 
+ */ + public void setMaxMemMB(int memory) { + setCacheValue("maxMemMB", memory); + } + + /** + * Sets the maximum number of unique lines in .data files in a bucket, which + * may help to reduce memory consumption. + * <p> + * If this value is exceeded, a hot bucket is rolled to prevent a further + * increase. If your buckets are rolling due to Strings.data hitting this + * limit, the culprit might be the "punct" field in your data. If you don't + * use that field, it might be better to just disable this (see the + * props.conf.spec in $SPLUNK_HOME/etc/system/README). + * + * @param entries The maximum number of unique lines. A value of 0 means + * infinite lines. + */ + public void setMaxMetaEntries(int entries) { + setCacheValue("maxMetaEntries", entries); + } + + + /** + * Sets the upper limit for how long an event can sit in a + * raw slice. This value applies only when replication is enabled for this + * index, and is ignored otherwise.<br> + * If there are any acknowledged events sharing this raw slice, the + * {@code MaxTimeUnreplicatedWithAcks} parameter applies instead. + * + * @param value The upper limit, in seconds. A value of 0 disables this + * setting. + */ + public void setMaxTimeUnreplicatedNoAcks(int value) { + setCacheValue("maxTimeUnreplicatedNoAcks", value); + } + + /** + * Sets the upper limit for how long an event can sit unacknowledged in a + * raw slice. This value only applies when indexer acknowledgement is + * enabled on forwarders and replication is enabled with clustering. + * <p> + * This number should not exceed the acknowledgement timeout configured on + * any forwarder. + * + * @param value The upper limit, in seconds. A value of 0 disables this + * setting (not recommended). + */ + public void setMaxTimeUnreplicatedWithAcks(int value) { + setCacheValue("maxTimeUnreplicatedWithAcks", value); + } + + /** + * Sets the maximum size for this index. If an index grows larger than this + * value, the oldest data is frozen. 
+ * + * @param size The maximum index size, in MB. + */ + public void setMaxTotalDataSizeMB(int size) { + setCacheValue("maxTotalDataSizeMB", size); + } + + /** + * Sets the maximum number of warm buckets. If this number is exceeded, + * the warm buckets with the lowest value for their latest times will be + * moved to cold. + * + * @param buckets The maximum number of warm buckets. + */ + public void setMaxWarmDBCount(int buckets) { + setCacheValue("maxWarmDBCount", buckets); + } + + /** + * Sets the frequency at which Splunkd forces a file system sync while + * compressing journal slices for this index. A value of "disable" disables + * this feature completely, while a value of 0 forces a file-system sync + * after completing compression of every journal slice. + * + * @param frequency The file-system sync frequency, as an integer or + * "disable". + */ + public void setMinRawFileSyncSecs(String frequency) { + setCacheValue("minRawFileSyncSecs", frequency); + } + + /** + * Sets the frequency at which metadata is partially synced (synced + * in-place) for this index. A value of 0 disables partial syncing, so + * metadata is only synced on the {@code ServiceMetaPeriod} interval. + * @see #setServiceMetaPeriod + * @see #getServiceMetaPeriod + * + * @param frequency The metadata sync interval, in seconds. + */ + public void setPartialServiceMetaPeriod(int frequency) { + setCacheValue("partialServiceMetaPeriod", frequency); + } + + /** + * Sets a quarantine for events that are timestamped in the future to help + * prevent main hot buckets from being polluted with fringe events. Events + * that are newer than "now" plus this value are quarantined. + * + * @param window The future event-time quarantine, in seconds. 
+ */ + public void setQuarantineFutureSecs(int window) { + setCacheValue("quarantineFutureSecs", window); + } + + /** + * Sets a quarantine for events that are timestamped in the past to help + * prevent main hot buckets from being polluted with fringe events. Events + * that are older than "now" minus this value are quarantined. + * + * @param window The past event-time quarantine, in seconds. + */ + public void setQuarantinePastSecs(int window) { + setCacheValue("quarantinePastSecs", window); + } + + /** + * Sets the target uncompressed size of individual raw slices in the rawdata + * journal for this index. + * <p> + * This parameter only specifies a target chunk size. The actual chunk size + * might be slightly larger by an amount proportional to an individual event + * size. + * <blockquote> + * <b>WARNING:</b> This is an advanced parameter. Only change it if you are + * instructed to do so by Splunk Support. + * </blockquote> + * @param size The target uncompressed size, in bytes. (0 is not a valid + * value--if 0 is used, this parameter is set to the default value.) + */ + public void setRawChunkSizeBytes(int size) { + setCacheValue("rawChunkSizeBytes", size); + } + + /** + * Sets the frequency to check for the need to create a new hot bucket and + * the need to roll or freeze any warm or cold buckets for this index. + * + * @param frequency The check frequency, in seconds. + */ + public void setRotatePeriodInSecs(int frequency) { + setCacheValue("rotatePeriodInSecs", frequency); + } + + /** + * Sets the frequency at which metadata is synced to disk for this index. + * + * @param frequency The meta data sync frequency, in seconds. + */ + public void setServiceMetaPeriod(int frequency) { + setCacheValue("serviceMetaPeriod", frequency); + } + + /** + * Sets whether the sync operation is called before the file descriptor is + * closed on metadata updates. 
+ * <p> + * This functionality improves the integrity of metadata files, especially + * with regard to operating system crashes and machine failures. + * <blockquote> + * <b>WARNING:</b> This is an advanced parameter. Only change it if you are + * instructed to do so by Splunk Support. + * </blockquote> + * @param sync {@code true} to call the sync operation before the file + * descriptor is closed on metadata updates, {@code false} if not. + */ + public void setSyncMeta(boolean sync) { + setCacheValue("syncMeta", sync); + } + + /** + * Sets the frequency at which Splunk checks for an index throttling + * condition. + * + * @param frequency The frequency of the throttling check, in seconds. + */ + public void setThrottleCheckPeriod(int frequency) { + setCacheValue("throttleCheckPeriod", frequency); + } + + /** + * Submits an event to this index through an HTTP POST request. + * + * @param data The event data to post. + */ + public void submit(String data) { + Receiver receiver = service.getReceiver(); + receiver.submit(getName(), data); + } + + /** + * Submits an event to this index through an HTTP POST request. + * + * @param args Optional arguments for this request. Valid parameters are: + * "host", "host_regex", "source", and "sourcetype". + * @param data The event data to post. + */ + public void submit(Args args, String data) { + Receiver receiver = service.getReceiver(); + receiver.submit(getName(), args, data); + } + + /** + * Uploads a file to this index as an event stream. + * <p> + * <b>Note:</b> This file must be directly accessible by the Splunk server. + * + * @param filename The path and filename. + */ + public void upload(String filename) { + EntityCollection<Upload> uploads = service.getUploads(); + Args args = new Args("index", getName()); + uploads.create(filename, args); + } + + /** + * Uploads a file to this index as an event stream. + * <p> + * <b>Note:</b> This file must be directly accessible by the Splunk server. 
+ * + * @param filename The path and filename. + * + * @param args Optional arguments for this request. Valid parameters are: + * "host", "sourcetype", "rename-source". More found at: + * http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTinput#data.2Finputs.2Foneshot + * + */ + public void upload(String filename, Args args) { + EntityCollection<Upload> uploads = service.getUploads(); + if(args.containsKey("index")){ + throw new IllegalArgumentException("The 'index' parameter cannot be passed to an index's oneshot upload."); + } + args.add("index", getName()); + uploads.create(filename, args); + } +} + diff --git a/splunk/src/main/java/com/splunk/InputCollection.java b/splunk/src/main/java/com/splunk/InputCollection.java index e94b2c28..4f871936 100644 --- a/splunk/src/main/java/com/splunk/InputCollection.java +++ b/splunk/src/main/java/com/splunk/InputCollection.java @@ -1,405 +1,405 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.*; - -/** - * The {@code InputCollection} class represents a collection of inputs. The - * collection is heterogeneous and each member contains an {@code InputKind} - * value that indicates the specific type of input. - */ -public class InputCollection extends EntityCollection<Input> { - protected Set<InputKind> inputKinds = new HashSet<>(); - - /** - * Class constructor. 
- * - * @param service The connected {@code Service} instance. - */ - InputCollection(Service service) { - super(service, "data/inputs"); - } - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - */ - InputCollection(Service service, Args args) { - super(service, "data/inputs", args); - } - - /** {@inheritDoc} */ - @Override public boolean containsKey(Object key) { - Input input = retrieveInput((String)key); - return (input != null); - } - - /** - * Creates a stub for a new data input. - * - * @param name Depending on the type of input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @return No return value. - * @throws UnsupportedOperationException The UnsupportedOperationException instance - */ - @Override public Input create(String name) { - throw new UnsupportedOperationException(); - } - - /** - * Creates a stub for a new data input based on additional arguments. - * - * @param name Depending on the type of data input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @param args Optional arguments to define the data input. 
For a list of - * the available parameters, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" - * target="_blank">Input parameters</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * @return No return value. - * @throws UnsupportedOperationException The UnsupportedOperationException instance - */ - @Override public Input create(String name, Map args) { - throw new UnsupportedOperationException(); - } - - /** - * Creates a new data input based on the input kind. - * - * @param name Depending on the type of data input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @param kind A member of {@code InputKind}, indicating the type of input. - * @param <T> The implicit type of the input. - * @return The {@code Input} that was created. - */ - public <T extends Input> T create(String name, InputKind kind) { - return (T)create(name, kind, (Map<String, Object>)null); - } - - /** - * Creates a new data input based on the input kind and additional - * arguments. - * - * @param name Depending on the type of data input, a string that contains: - * <ul><li>The filename or directory and path (for monitor and oneshot - * inputs)</li> - * <li> The script name (for script inputs)</li> - * <li> The port number (for TCP and UDP inputs)</li> - * <li> The collection name (for Windows Perfmon and WMI inputs)</li> - * <li> The stanza (for Windows Registry inputs)</li> - * <li> The name of the configuration (for Windows AD inputs)</li></ul> - * @param kind A member of {@code InputKind}, indicating the type of input. 
- * @param args Optional arguments to define the data input. For a list of - * the available parameters, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" - * target="_blank">Input parameters</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * @param <T> The implicit type of the input. - * @return The {@code Input} that was created. - */ - public <T extends Input> T - create(String name, InputKind kind, Map<String, Object> args) { - args = Args.create(args).add("name", name); - String path = this.path + "/" + kind.getRelativePath(); - service.post(path, args); - - invalidate(); - - return (T)get(name); - } - - /** - * Creates a new data input based on an Atom entry. - * - * @param entry The {@code AtomEntry} object describing the entry. - * @return The {@code Input} that was created. - */ - @Override - protected Input createItem(AtomEntry entry) { - String path = itemPath(entry); - InputKind kind = itemKind(path); - Class inputClass = kind.getInputClass(); - return createItem(inputClass, path, null); - } - - /** - * {@inheritDoc} - */ - @Override public Input get(Object key) { - return retrieveInput((String)key); - } - - /** - * Returns the value of a scoped, namespace-constrained key, if it - * exists within this collection. - * - * @param key The key to look up. - * @param namespace The namespace to constrain the search to. - * @return The value indexed by the key, or {@code null} if it doesn't - * exist. - */ - public Input get(Object key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - return retrieveInput((String)key, namespace); - } - - /** - * Returns the input kind for a given path. - * - * @param path The relative endpoint path (the path that follows - * data/inputs). - * @return A member of {@code InputKind}, indicating the type of input. 
- */ - protected InputKind itemKind(String path) { - String relpathWithInputName = Util.substringAfter(path, "/data/inputs/", null); - for (InputKind kind : inputKinds) { - if (relpathWithInputName.startsWith(kind.getRelativePath())) { - return kind; - } - } - - // Not good. This means that there is an input of an unknown kind. - return InputKind.Unknown; - } - - /** - * Return a set of all the input kinds recognized by the Splunk server. - * - * @return A set of {@code InputKind}s. - */ - public Set<InputKind> getInputKinds() { - return this.inputKinds; - } - - /** - * Indicates whether a given string matches the input name (string - * equality). For scripted inputs, which are listed by their full path, this - * method compares only the final component of the filename for a match. - * - * @param kind A member of {@code InputKind}, indicating the type of input. - * @param searchFor A string to search for. - * @param searchIn The string that contains the input name. - * @return {@code true} if the string matches the input name, {@code false} - * if not. - */ - protected static boolean matchesInputName(InputKind kind, String searchFor, String searchIn) { - if (kind == InputKind.Script) { - return searchIn.endsWith("/" + searchFor) || searchIn.endsWith("\\" + searchFor); - } else { - return searchFor.equals(searchIn); - } - } - - - /** - * Assembles a set of all the input kinds that are available on this Splunk - * instance. To list all inputs, pass an empty list to {@code subPath}. Or, - * specify a component of the path such as "tcp" to list all TCP inputs. - * - * @param subPath A list of strings containing the components of the - * endpoint path that follow data/inputs/. - * @return A set of available {@code InputKind}s. 
- */ - private Set<InputKind> assembleInputKindSet(List<String> subPath) { - Set<InputKind> kinds = new HashSet<>(); - ResponseMessage response = service.get(this.path + "/" + Util.join("/", subPath)); - AtomFeed feed = AtomFeed.parseStream(response.getContent()); - for (AtomEntry entry : feed.entries) { - String itemKeyName = itemKey(entry); - - boolean hasCreateLink = false; - for (String linkName : entry.links.keySet()) { - if (linkName.equals("create")) { - hasCreateLink = true; - } - } - - List<String> thisSubPath = new ArrayList<>(subPath); - thisSubPath.add(itemKeyName); - - String relpath = Util.join("/", thisSubPath); - - if (relpath.equals("all") || relpath.equals("tcp/ssl")) { - // Skip these input types - continue; - } else if (hasCreateLink) { - // Found an InputKind leaf - InputKind newKind = InputKind.create(relpath); - kinds.add(newKind); - } else { - Set<InputKind> subKinds = assembleInputKindSet(thisSubPath); - kinds.addAll(subKinds); - } - } - return kinds; - } - - /** - * Refreshes the {@code inputKinds} field on this object. - */ - private void refreshInputKinds() { - Set<InputKind> kinds = assembleInputKindSet(new ArrayList<>()); - - this.inputKinds.clear(); - this.inputKinds.addAll(kinds); - } - - /** - * Refreshes this input collection. - * - * @return The refreshed {@code InputCollection}. - */ - @Override public InputCollection refresh() { - // Populate this.inputKinds - refreshInputKinds(); - - items.clear(); - - // Iterate over all input kinds and collect all instances. - for (InputKind kind : this.inputKinds) { - if (service.versionIsAtLeast("6.0.0") && kind.getKind().equals("registry")) { - // In Splunk 6 and later, the registry endpoint has been deprecated in favor of the new - // WinRegMon modular input, but both now point to the same place. To avoid duplicates, we have - // to read only one of them. 
- continue; - } - String relpath = kind.getRelativePath(); - String inputs = String.format("%s/%s?count=-1", path, relpath); - ResponseMessage response; - try { - response = service.get(inputs); - } - catch (HttpException e) { - // On some platforms certain input endpoints don't exist, for - // example the Windows inputs endpoints don't exist on non- - // Windows platforms. - if (e.getStatus() == 404) continue; - throw e; - } - AtomFeed feed; - try { - feed = AtomFeed.parseStream(response.getContent()); - } catch (Exception e) { - throw new RuntimeException(e); - } - load(feed); - } - - return this; - } - - /** - * {@inheritDoc} - */ - @Override public Input remove(String key) { - Input input = retrieveInput(key); - if (input != null) { - input.remove(); - } - return input; - } - - /** - * {@inheritDoc} - */ - @Override public Input remove( - String key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - - Input input = retrieveInput(key, namespace); - if (input != null) { - input.remove(); - } - return input; - } - - private Input retrieveInput(String key) { - validate(); - - // Because scripted input names are not 1:1 with the original name - // (they are the absolute path on the Splunk instance followed by - // the original name), we will iterate over the entities in the list, - // and if we find one that matches, return it. 
- Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); - for (Entry<String, LinkedList<Input>> entry: set) { - String entryKey = entry.getKey(); - LinkedList<Input> entryValue = entry.getValue(); - InputKind kind = entryValue.get(0).getKind(); - - if (InputCollection.matchesInputName(kind, key, entryKey)) { - if (entryValue.size() > 1) { - throw new SplunkException(SplunkException.AMBIGUOUS, - "Multiple inputs matched " + key + "; specify a namespace to disambiguate."); - } else { - return entryValue.get(0); - } - } - } - return null; - } - - private Input retrieveInput(String key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - validate(); - - // Because scripted input names are not 1:1 with the original name - // (they are the absolute path on the Splunk instance followed by - // the original name), we will iterate over the entities in the list, - // and if we find one that matches, return it. - String pathMatcher = service.fullpath("", namespace); - Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); - for (Entry<String, LinkedList<Input>> entry: set) { - String entryKey = entry.getKey(); - LinkedList<Input> entryValue = entry.getValue(); - InputKind kind = entryValue.get(0).getKind(); - - if (InputCollection.matchesInputName(kind, key, entryKey)) { - for (Input entity: entryValue) { - if (entity.path.startsWith(pathMatcher)) { - return entity; - } - } - } - } - return null; - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.*; + +/** + * The {@code InputCollection} class represents a collection of inputs. The + * collection is heterogeneous and each member contains an {@code InputKind} + * value that indicates the specific type of input. + */ +public class InputCollection extends EntityCollection<Input> { + protected Set<InputKind> inputKinds = new HashSet<>(); + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + */ + InputCollection(Service service) { + super(service, "data/inputs"); + } + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + */ + InputCollection(Service service, Args args) { + super(service, "data/inputs", args); + } + + /** {@inheritDoc} */ + @Override public boolean containsKey(Object key) { + Input input = retrieveInput((String)key); + return (input != null); + } + + /** + * Creates a stub for a new data input. + * + * @param name Depending on the type of input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @return No return value. + * @throws UnsupportedOperationException The UnsupportedOperationException instance + */ + @Override public Input create(String name) { + throw new UnsupportedOperationException(); + } + + /** + * Creates a stub for a new data input based on additional arguments. 
+ * + * @param name Depending on the type of data input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @param args Optional arguments to define the data input. For a list of + * the available parameters, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" + * target="_blank">Input parameters</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * @return No return value. + * @throws UnsupportedOperationException The UnsupportedOperationException instance + */ + @Override public Input create(String name, Map args) { + throw new UnsupportedOperationException(); + } + + /** + * Creates a new data input based on the input kind. + * + * @param name Depending on the type of data input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @param kind A member of {@code InputKind}, indicating the type of input. + * @param <T> The implicit type of the input. + * @return The {@code Input} that was created. + */ + public <T extends Input> T create(String name, InputKind kind) { + return (T)create(name, kind, (Map<String, Object>)null); + } + + /** + * Creates a new data input based on the input kind and additional + * arguments. 
+ * + * @param name Depending on the type of data input, a string that contains: + * <ul><li>The filename or directory and path (for monitor and oneshot + * inputs)</li> + * <li> The script name (for script inputs)</li> + * <li> The port number (for TCP and UDP inputs)</li> + * <li> The collection name (for Windows Perfmon and WMI inputs)</li> + * <li> The stanza (for Windows Registry inputs)</li> + * <li> The name of the configuration (for Windows AD inputs)</li></ul> + * @param kind A member of {@code InputKind}, indicating the type of input. + * @param args Optional arguments to define the data input. For a list of + * the available parameters, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2#inputparams" + * target="_blank">Input parameters</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * @param <T> The implicit type of the input. + * @return The {@code Input} that was created. + */ + public <T extends Input> T + create(String name, InputKind kind, Map<String, Object> args) { + args = Args.create(args).add("name", name); + String path = this.path + "/" + kind.getRelativePath(); + service.post(path, args); + + invalidate(); + + return (T)get(name); + } + + /** + * Creates a new data input based on an Atom entry. + * + * @param entry The {@code AtomEntry} object describing the entry. + * @return The {@code Input} that was created. + */ + @Override + protected Input createItem(AtomEntry entry) { + String path = itemPath(entry); + InputKind kind = itemKind(path); + Class inputClass = kind.getInputClass(); + return createItem(inputClass, path, null); + } + + /** + * {@inheritDoc} + */ + @Override public Input get(Object key) { + return retrieveInput((String)key); + } + + /** + * Returns the value of a scoped, namespace-constrained key, if it + * exists within this collection. + * + * @param key The key to look up. + * @param namespace The namespace to constrain the search to. 
+ * @return The value indexed by the key, or {@code null} if it doesn't + * exist. + */ + public Input get(Object key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + return retrieveInput((String)key, namespace); + } + + /** + * Returns the input kind for a given path. + * + * @param path The relative endpoint path (the path that follows + * data/inputs). + * @return A member of {@code InputKind}, indicating the type of input. + */ + protected InputKind itemKind(String path) { + String relpathWithInputName = Util.substringAfter(path, "/data/inputs/", null); + for (InputKind kind : inputKinds) { + if (relpathWithInputName.startsWith(kind.getRelativePath())) { + return kind; + } + } + + // Not good. This means that there is an input of an unknown kind. + return InputKind.Unknown; + } + + /** + * Return a set of all the input kinds recognized by the Splunk server. + * + * @return A set of {@code InputKind}s. + */ + public Set<InputKind> getInputKinds() { + return this.inputKinds; + } + + /** + * Indicates whether a given string matches the input name (string + * equality). For scripted inputs, which are listed by their full path, this + * method compares only the final component of the filename for a match. + * + * @param kind A member of {@code InputKind}, indicating the type of input. + * @param searchFor A string to search for. + * @param searchIn The string that contains the input name. + * @return {@code true} if the string matches the input name, {@code false} + * if not. + */ + protected static boolean matchesInputName(InputKind kind, String searchFor, String searchIn) { + if (kind == InputKind.Script) { + return searchIn.endsWith("/" + searchFor) || searchIn.endsWith("\\" + searchFor); + } else { + return searchFor.equals(searchIn); + } + } + + + /** + * Assembles a set of all the input kinds that are available on this Splunk + * instance. To list all inputs, pass an empty list to {@code subPath}. 
Or, + * specify a component of the path such as "tcp" to list all TCP inputs. + * + * @param subPath A list of strings containing the components of the + * endpoint path that follow data/inputs/. + * @return A set of available {@code InputKind}s. + */ + private Set<InputKind> assembleInputKindSet(List<String> subPath) { + Set<InputKind> kinds = new HashSet<>(); + ResponseMessage response = service.get(this.path + "/" + Util.join("/", subPath)); + AtomFeed feed = AtomFeed.parseStream(response.getContent()); + for (AtomEntry entry : feed.entries) { + String itemKeyName = itemKey(entry); + + boolean hasCreateLink = false; + for (String linkName : entry.links.keySet()) { + if (linkName.equals("create")) { + hasCreateLink = true; + } + } + + List<String> thisSubPath = new ArrayList<>(subPath); + thisSubPath.add(itemKeyName); + + String relpath = Util.join("/", thisSubPath); + + if (relpath.equals("all") || relpath.equals("tcp/ssl")) { + // Skip these input types + continue; + } else if (hasCreateLink) { + // Found an InputKind leaf + InputKind newKind = InputKind.create(relpath); + kinds.add(newKind); + } else { + Set<InputKind> subKinds = assembleInputKindSet(thisSubPath); + kinds.addAll(subKinds); + } + } + return kinds; + } + + /** + * Refreshes the {@code inputKinds} field on this object. + */ + private void refreshInputKinds() { + Set<InputKind> kinds = assembleInputKindSet(new ArrayList<>()); + + this.inputKinds.clear(); + this.inputKinds.addAll(kinds); + } + + /** + * Refreshes this input collection. + * + * @return The refreshed {@code InputCollection}. + */ + @Override public InputCollection refresh() { + // Populate this.inputKinds + refreshInputKinds(); + + items.clear(); + + // Iterate over all input kinds and collect all instances. 
+ for (InputKind kind : this.inputKinds) { + if (service.versionIsAtLeast("6.0.0") && kind.getKind().equals("registry")) { + // In Splunk 6 and later, the registry endpoint has been deprecated in favor of the new + // WinRegMon modular input, but both now point to the same place. To avoid duplicates, we have + // to read only one of them. + continue; + } + String relpath = kind.getRelativePath(); + String inputs = String.format("%s/%s?count=-1", path, relpath); + ResponseMessage response; + try { + response = service.get(inputs); + } + catch (HttpException e) { + // On some platforms certain input endpoints don't exist, for + // example the Windows inputs endpoints don't exist on non- + // Windows platforms. + if (e.getStatus() == 404) continue; + throw e; + } + AtomFeed feed; + try { + feed = AtomFeed.parseStream(response.getContent()); + } catch (Exception e) { + throw new RuntimeException(e); + } + load(feed); + } + + return this; + } + + /** + * {@inheritDoc} + */ + @Override public Input remove(String key) { + Input input = retrieveInput(key); + if (input != null) { + input.remove(); + } + return input; + } + + /** + * {@inheritDoc} + */ + @Override public Input remove( + String key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + + Input input = retrieveInput(key, namespace); + if (input != null) { + input.remove(); + } + return input; + } + + private Input retrieveInput(String key) { + validate(); + + // Because scripted input names are not 1:1 with the original name + // (they are the absolute path on the Splunk instance followed by + // the original name), we will iterate over the entities in the list, + // and if we find one that matches, return it. 
+ Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); + for (Entry<String, LinkedList<Input>> entry: set) { + String entryKey = entry.getKey(); + LinkedList<Input> entryValue = entry.getValue(); + InputKind kind = entryValue.get(0).getKind(); + + if (InputCollection.matchesInputName(kind, key, entryKey)) { + if (entryValue.size() > 1) { + throw new SplunkException(SplunkException.AMBIGUOUS, + "Multiple inputs matched " + key + "; specify a namespace to disambiguate."); + } else { + return entryValue.get(0); + } + } + } + return null; + } + + private Input retrieveInput(String key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + validate(); + + // Because scripted input names are not 1:1 with the original name + // (they are the absolute path on the Splunk instance followed by + // the original name), we will iterate over the entities in the list, + // and if we find one that matches, return it. + String pathMatcher = service.fullpath("", namespace); + Set<Entry<String, LinkedList<Input>>> set = items.entrySet(); + for (Entry<String, LinkedList<Input>> entry: set) { + String entryKey = entry.getKey(); + LinkedList<Input> entryValue = entry.getValue(); + InputKind kind = entryValue.get(0).getKind(); + + if (InputCollection.matchesInputName(kind, key, entryKey)) { + for (Input entity: entryValue) { + if (entity.path.startsWith(pathMatcher)) { + return entity; + } + } + } + } + return null; + } +} + diff --git a/splunk/src/main/java/com/splunk/LicensePool.java b/splunk/src/main/java/com/splunk/LicensePool.java index 7c0e38f5..b9565fe6 100644 --- a/splunk/src/main/java/com/splunk/LicensePool.java +++ b/splunk/src/main/java/com/splunk/LicensePool.java @@ -1,172 +1,172 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; - -/** -* The {@code LicensePool} class represents a license pool, which is made up -* of a single license master and zero or more license slave instances of Splunk -* that are configured to use the licensing volume from a set license or license - * stack. - */ -public class LicensePool extends Entity { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The license pool endpoint. - */ - LicensePool(Service service, String path) { - super(service, path); - } - - /** - * Returns the description of this license pool. - * - * @return The description, or {@code null} if not specified. - */ - public String getDescription() { - return getString("description", null); - } - - /** - * Returns the indexing quota for this license pool. - * - * @return A string containing the indexing quota in bytes, or "MAX" to - * indicate the maximum amount that is allowed. - */ - public String getQuota() { - return getString("quota", "0"); - } - - /** - * Returns the list of slaves for this license pool. - * - * @return A comma-separated list of slaves by ID, or {@code null} if not - * specified. - */ - public String[] getSlaves() { - if (toUpdate.containsKey("slaves")) { - String value = (String)toUpdate.get("slaves"); - return value.split(","); - } - else { - return getStringArray("slaves", null); - } - } - - /** - * Returns the usage of indexing volume by slave licenses in this license - * pool. 
- * - * @return A map from each slave GUID to the number of bytes it is using. - */ - public Map<String, Long> getSlavesUsageBytes() { - @SuppressWarnings("unchecked") - HashMap<String, Object> values = (HashMap<String, Object>)get("slaves_usage_bytes"); - if (values == null) { - values = new HashMap<>(); - } - - HashMap<String, Long> usageBytes = new HashMap<>(); - - for(String key : values.keySet()) { - String value = (String)values.get(key); - usageBytes.put(key, Long.parseLong(value)); - } - - return usageBytes; - } - - /** - * Returns the stack ID for this license pool. Valid values are: - * <ul> - * <li>"download-trial"</li> - * <li>"enterprise"</li> - * <li>"forwarder"</li> - * <li>"free"</li></ul> - * - * @return The license pool stack ID, or {@code null} if not specified. - */ - public String getStackId() { - return getString("stack_id", null); - } - - /** - * Returns the usage of indexing volume for this license pool. - * - * @return This license pool's usage, in bytes. - */ - public long getUsedBytes() { - return getLong("used_bytes", 0); - } - - /** - * Sets whether to append or overwrite slaves to this license pool. - * - * @param appendSlaves {@code true} to append slaves, {@code false} to - * overwrite slaves. - */ - public void setAppendSlaves(boolean appendSlaves) { - setCacheValue("append_slaves", appendSlaves); - } - - /** - * Sets the description of this license pool. - * - * @param description The description. - */ - public void setDescription(String description) { - setCacheValue("description", description); - } - - /** - * Sets the byte quota of this license pool. 
- * - * @param quota The indexing quota of this license pool, specified as: - * <ul><li><i>number</i></li> - * <li><i>number</i> followed by "MB" or "GB" (for example, "10GB")</li> - * <li>"MAX" (Only one license pool can have "MAX" size in a stack.)</li> - * </ul> - */ - public void setQuota(String quota) { - setCacheValue("quota", quota); - } - - /** - * Sets the list of slaves that are members of this license pool. - * - * @param slaves The comma-separated list of slaves. Use an asterisk ("*") - * to accept all slaves. - */ - public void setSlaves(String slaves) { - setCacheValue("slaves", slaves); - } - - /** - * Sets the list of slaves that are members of this license pool. - * - * @param slaves The array of slaves. To accept all slaves, use an - * array with a single asterisk element ("*"). - */ - public void setSlaves(String[] slaves) { - setSlaves(Util.join(",", slaves)); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; + +/** +* The {@code LicensePool} class represents a license pool, which is made up +* of a single license master and zero or more license slave instances of Splunk +* that are configured to use the licensing volume from a set license or license + * stack. + */ +public class LicensePool extends Entity { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. 
+ * @param path The license pool endpoint. + */ + LicensePool(Service service, String path) { + super(service, path); + } + + /** + * Returns the description of this license pool. + * + * @return The description, or {@code null} if not specified. + */ + public String getDescription() { + return getString("description", null); + } + + /** + * Returns the indexing quota for this license pool. + * + * @return A string containing the indexing quota in bytes, or "MAX" to + * indicate the maximum amount that is allowed. + */ + public String getQuota() { + return getString("quota", "0"); + } + + /** + * Returns the list of slaves for this license pool. + * + * @return A comma-separated list of slaves by ID, or {@code null} if not + * specified. + */ + public String[] getSlaves() { + if (toUpdate.containsKey("slaves")) { + String value = (String)toUpdate.get("slaves"); + return value.split(","); + } + else { + return getStringArray("slaves", null); + } + } + + /** + * Returns the usage of indexing volume by slave licenses in this license + * pool. + * + * @return A map from each slave GUID to the number of bytes it is using. + */ + public Map<String, Long> getSlavesUsageBytes() { + @SuppressWarnings("unchecked") + HashMap<String, Object> values = (HashMap<String, Object>)get("slaves_usage_bytes"); + if (values == null) { + values = new HashMap<>(); + } + + HashMap<String, Long> usageBytes = new HashMap<>(); + + for(String key : values.keySet()) { + String value = (String)values.get(key); + usageBytes.put(key, Long.parseLong(value)); + } + + return usageBytes; + } + + /** + * Returns the stack ID for this license pool. Valid values are: + * <ul> + * <li>"download-trial"</li> + * <li>"enterprise"</li> + * <li>"forwarder"</li> + * <li>"free"</li></ul> + * + * @return The license pool stack ID, or {@code null} if not specified. + */ + public String getStackId() { + return getString("stack_id", null); + } + + /** + * Returns the usage of indexing volume for this license pool. 
+ * + * @return This license pool's usage, in bytes. + */ + public long getUsedBytes() { + return getLong("used_bytes", 0); + } + + /** + * Sets whether to append or overwrite slaves to this license pool. + * + * @param appendSlaves {@code true} to append slaves, {@code false} to + * overwrite slaves. + */ + public void setAppendSlaves(boolean appendSlaves) { + setCacheValue("append_slaves", appendSlaves); + } + + /** + * Sets the description of this license pool. + * + * @param description The description. + */ + public void setDescription(String description) { + setCacheValue("description", description); + } + + /** + * Sets the byte quota of this license pool. + * + * @param quota The indexing quota of this license pool, specified as: + * <ul><li><i>number</i></li> + * <li><i>number</i> followed by "MB" or "GB" (for example, "10GB")</li> + * <li>"MAX" (Only one license pool can have "MAX" size in a stack.)</li> + * </ul> + */ + public void setQuota(String quota) { + setCacheValue("quota", quota); + } + + /** + * Sets the list of slaves that are members of this license pool. + * + * @param slaves The comma-separated list of slaves. Use an asterisk ("*") + * to accept all slaves. + */ + public void setSlaves(String slaves) { + setCacheValue("slaves", slaves); + } + + /** + * Sets the list of slaves that are members of this license pool. + * + * @param slaves The array of slaves. To accept all slaves, use an + * array with a single asterisk element ("*"). + */ + public void setSlaves(String[] slaves) { + setSlaves(Util.join(",", slaves)); + } +} diff --git a/splunk/src/main/java/com/splunk/ModularInputKind.java b/splunk/src/main/java/com/splunk/ModularInputKind.java index 9f5857f6..c7d17f8b 100644 --- a/splunk/src/main/java/com/splunk/ModularInputKind.java +++ b/splunk/src/main/java/com/splunk/ModularInputKind.java @@ -1,109 +1,109 @@ -/* - * Copyright 2012 Splunk, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import java.util.HashMap; -import java.util.Map; - -/** - * The {@code ModularInputKind} class represents a particular modular input. - * The actual inputs of this kind can be accessed from the - * {@code InputCollection} object. - */ -public class ModularInputKind extends Entity { - protected Map<String, Map<String,String>> args; - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The entity's endpoint. - */ - ModularInputKind(Service service, String path) { - super(service, path); - Map<String, Map<String, Map<String,String>>> endpoint = - (Map<String, Map<String, Map<String,String>>>)get("endpoint"); - this.args = endpoint.get("args"); - } - - /** - * Returns an argument map that contains the argument names as keys, and the - * {@code ModularInputKindArgument}s as corresponding values. - * - * @return A {@code Map} containing the argument key-value pairs. - */ - public Map<String, ModularInputKindArgument> getArguments() { - Map<String, ModularInputKindArgument> arguments = new HashMap<>(); - for (String argumentName : args.keySet()) { - arguments.put(argumentName, getArgument(argumentName)); - } - return arguments; - } - - /** - * Returns the streaming mode of this modular input kind. - * - * @return The streaming mode ("xml" or "simple"). 
- */ - public String getStreamingMode() { - return getString("streaming_mode"); - } - - /** - * Returns a map-like object representing a particular argument of this - * modular input kind. - * - * @param argumentName The name of the argument to retrieve. - * @return A {@code ModularInputKindArgument} object representing the given - * argument, or {@code null} if the argument does not exist. - */ - public ModularInputKindArgument getArgument(String argumentName) { - if (this.args.get(argumentName) != null) { - return new ModularInputKindArgument(this.args.get(argumentName)); - } else { - return null; - } - } - - /** - * Returns the description of this modular input kind. - * - * @return A string containing the description. - */ - public String getDescription() { - return getString("description", null); - } - - /** - * Returns the title of this modular input kind, which is also displayed in - * Splunk Web (rather than the name used in the REST API). - * - * @return A string containing the title. - */ - public String getTitle() { - return getString("title", null); - } - - /** - * Indicates whether this modular input kind has a given argument. - * - * @param argumentName The argument to look up. - * @return {@code true} if the argument exists, {@code false} if not. - */ - public boolean hasArgument(String argumentName) { - return this.args.containsKey(argumentName); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ +package com.splunk; + +import java.util.HashMap; +import java.util.Map; + +/** + * The {@code ModularInputKind} class represents a particular modular input. + * The actual inputs of this kind can be accessed from the + * {@code InputCollection} object. + */ +public class ModularInputKind extends Entity { + protected Map<String, Map<String,String>> args; + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The entity's endpoint. + */ + ModularInputKind(Service service, String path) { + super(service, path); + Map<String, Map<String, Map<String,String>>> endpoint = + (Map<String, Map<String, Map<String,String>>>)get("endpoint"); + this.args = endpoint.get("args"); + } + + /** + * Returns an argument map that contains the argument names as keys, and the + * {@code ModularInputKindArgument}s as corresponding values. + * + * @return A {@code Map} containing the argument key-value pairs. + */ + public Map<String, ModularInputKindArgument> getArguments() { + Map<String, ModularInputKindArgument> arguments = new HashMap<>(); + for (String argumentName : args.keySet()) { + arguments.put(argumentName, getArgument(argumentName)); + } + return arguments; + } + + /** + * Returns the streaming mode of this modular input kind. + * + * @return The streaming mode ("xml" or "simple"). + */ + public String getStreamingMode() { + return getString("streaming_mode"); + } + + /** + * Returns a map-like object representing a particular argument of this + * modular input kind. + * + * @param argumentName The name of the argument to retrieve. + * @return A {@code ModularInputKindArgument} object representing the given + * argument, or {@code null} if the argument does not exist. 
+ */ + public ModularInputKindArgument getArgument(String argumentName) { + if (this.args.get(argumentName) != null) { + return new ModularInputKindArgument(this.args.get(argumentName)); + } else { + return null; + } + } + + /** + * Returns the description of this modular input kind. + * + * @return A string containing the description. + */ + public String getDescription() { + return getString("description", null); + } + + /** + * Returns the title of this modular input kind, which is also displayed in + * Splunk Web (rather than the name used in the REST API). + * + * @return A string containing the title. + */ + public String getTitle() { + return getString("title", null); + } + + /** + * Indicates whether this modular input kind has a given argument. + * + * @param argumentName The argument to look up. + * @return {@code true} if the argument exists, {@code false} if not. + */ + public boolean hasArgument(String argumentName) { + return this.args.containsKey(argumentName); + } +} diff --git a/splunk/src/main/java/com/splunk/MultiResultsReader.java b/splunk/src/main/java/com/splunk/MultiResultsReader.java index cc57b97f..aa1cfa5c 100644 --- a/splunk/src/main/java/com/splunk/MultiResultsReader.java +++ b/splunk/src/main/java/com/splunk/MultiResultsReader.java @@ -1,63 +1,63 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.io.IOException; -import java.util.Iterator; - -/** - * The {@code MultiResultsReader} class represents a streaming reader - * for Splunk search results. Using {@code <T extends ResultsReader>} allows - * specialization of {@code T} in subclasses of {@code MultiResultsReader}, such - * as {@link MultiResultsReaderXml} and {@link MultiResultsReaderJson}. - */ -public class MultiResultsReader<T extends ResultsReader> - extends StreamIterableBase<SearchResults> { - private T resultsReader; - - MultiResultsReader(T resultsReader) throws IOException { - this.resultsReader = resultsReader; - } - - /** - * Returns an iterator over the sets of results from this reader. - * @return An iterator. - */ - @Override - public final Iterator<SearchResults> iterator() { - return super.iterator(); - } - - /** - * Closes the reader and releases resources. - * @throws IOException If reader is not closed. - */ - public final void close() throws IOException { - resultsReader.close(); - } - - protected final T getNextElement() { - try { - if (!resultsReader.resetIteratorToNextSet()) { - return null; - } - return resultsReader; - } catch (IOException e) { - throw new RuntimeException(e); - } - } +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import java.io.IOException; +import java.util.Iterator; + +/** + * The {@code MultiResultsReader} class represents a streaming reader + * for Splunk search results. Using {@code <T extends ResultsReader>} allows + * specialization of {@code T} in subclasses of {@code MultiResultsReader}, such + * as {@link MultiResultsReaderXml} and {@link MultiResultsReaderJson}. + */ +public class MultiResultsReader<T extends ResultsReader> + extends StreamIterableBase<SearchResults> { + private T resultsReader; + + MultiResultsReader(T resultsReader) throws IOException { + this.resultsReader = resultsReader; + } + + /** + * Returns an iterator over the sets of results from this reader. + * @return An iterator. + */ + @Override + public final Iterator<SearchResults> iterator() { + return super.iterator(); + } + + /** + * Closes the reader and releases resources. + * @throws IOException If reader is not closed. + */ + public final void close() throws IOException { + resultsReader.close(); + } + + protected final T getNextElement() { + try { + if (!resultsReader.resetIteratorToNextSet()) { + return null; + } + return resultsReader; + } catch (IOException e) { + throw new RuntimeException(e); + } + } } \ No newline at end of file diff --git a/splunk/src/main/java/com/splunk/MultiResultsReaderJson.java b/splunk/src/main/java/com/splunk/MultiResultsReaderJson.java index 77a2c9f6..3d360fbd 100644 --- a/splunk/src/main/java/com/splunk/MultiResultsReaderJson.java +++ b/splunk/src/main/java/com/splunk/MultiResultsReaderJson.java @@ -1,42 +1,42 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.io.InputStream; - -/** - * The {@code MultiResultsReaderJson} class represents a streaming JSON reader - * for Splunk search results. This reader supports streams from export searches, - * which might return one of more previews before returning final results. - */ -public class MultiResultsReaderJson - extends MultiResultsReader<ResultsReaderJson> { - /** - * Class constructor. - * - * Constructs a streaming JSON reader for the event stream. You should only - * attempt to parse a JSON stream with this reader. Unpredictable results - * may occur if you try to parse a stream with a different format. - * - * @param inputStream The JSON stream to parse. - * @throws IOException The IOException instance - */ - public MultiResultsReaderJson(InputStream inputStream) throws IOException { - super(new ResultsReaderJson(inputStream, true)); - } +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import java.io.IOException; +import java.io.InputStream; + +/** + * The {@code MultiResultsReaderJson} class represents a streaming JSON reader + * for Splunk search results. This reader supports streams from export searches, + * which might return one of more previews before returning final results. + */ +public class MultiResultsReaderJson + extends MultiResultsReader<ResultsReaderJson> { + /** + * Class constructor. + * + * Constructs a streaming JSON reader for the event stream. You should only + * attempt to parse a JSON stream with this reader. Unpredictable results + * may occur if you try to parse a stream with a different format. + * + * @param inputStream The JSON stream to parse. + * @throws IOException The IOException instance + */ + public MultiResultsReaderJson(InputStream inputStream) throws IOException { + super(new ResultsReaderJson(inputStream, true)); + } } \ No newline at end of file diff --git a/splunk/src/main/java/com/splunk/MultiResultsReaderXml.java b/splunk/src/main/java/com/splunk/MultiResultsReaderXml.java index 53088389..3675dc57 100644 --- a/splunk/src/main/java/com/splunk/MultiResultsReaderXml.java +++ b/splunk/src/main/java/com/splunk/MultiResultsReaderXml.java @@ -1,42 +1,42 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.io.IOException; -import java.io.InputStream; - -/** - * The {@code MultiResultsReaderXml} class represents a streaming XML reader for - * Splunk search results. This reader supports streams from export searches, - * which might return one of more previews before returning final results. - */ -public class MultiResultsReaderXml - extends MultiResultsReader<ResultsReaderXml> { - /** - * Class constructor. - * - * Constructs a streaming XML reader for the event stream. You should only - * attempt to parse an XML stream with this reader. Unpredictable results - * may occur if you try to parse a stream with a different format. - * - * @param inputStream The XML stream to parse. - * @throws IOException The IOException instance - */ - public MultiResultsReaderXml(InputStream inputStream) throws IOException { - super(new ResultsReaderXml(inputStream, true)); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.IOException; +import java.io.InputStream; + +/** + * The {@code MultiResultsReaderXml} class represents a streaming XML reader for + * Splunk search results. This reader supports streams from export searches, + * which might return one of more previews before returning final results. + */ +public class MultiResultsReaderXml + extends MultiResultsReader<ResultsReaderXml> { + /** + * Class constructor. 
+ * + * Constructs a streaming XML reader for the event stream. You should only + * attempt to parse an XML stream with this reader. Unpredictable results + * may occur if you try to parse a stream with a different format. + * + * @param inputStream The XML stream to parse. + * @throws IOException The IOException instance + */ + public MultiResultsReaderXml(InputStream inputStream) throws IOException { + super(new ResultsReaderXml(inputStream, true)); + } +} diff --git a/splunk/src/main/java/com/splunk/PasswordCollection.java b/splunk/src/main/java/com/splunk/PasswordCollection.java index 1379a404..1a8da1c1 100644 --- a/splunk/src/main/java/com/splunk/PasswordCollection.java +++ b/splunk/src/main/java/com/splunk/PasswordCollection.java @@ -1,152 +1,152 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -/** - * The {@code PasswordCollection} class represents a collection of credentials. - */ -public class PasswordCollection extends EntityCollection<Password> { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - */ - PasswordCollection(Service service) { - super(service, service.passwordEndPoint, Password.class); - } - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. 
- */ - PasswordCollection(Service service, Args args) { - super(service, service.passwordEndPoint, Password.class, args); - } - - /** - * Creates a credential with a username and password. - * - * @param name The username. - * @param password The password. - * - * @return The new credential. - */ - public Password create(String name, String password) { - if(checkForWildcards()){ - throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); - } - Args args = new Args("password", password); - return create(name, args); - } - - /** - * Creates a credential with a username, password, and realm. - * - * @param name The username. - * @param password The password. - * @param realm The credential realm. - * @return The new credential. - */ - public Password create(String name, String password, String realm) { - if(checkForWildcards()){ - throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); - } - Args args = new Args(); - args.put("password", password); - args.put("realm", realm); - return create(name, args); - } - - /** - * Get a credential with realm and name. - * - * @param realm The credential realm. - * @param name The username. - * @return The credential, or null if not found. - */ - public Password get(String realm, String name) { - return super.get(String.format("%s:%s:", realm, name)); - } - - @Override - public Password get(Object key) { - // Make it compatible with the old way (low-efficient) - if (key instanceof String keyInst && !keyInst.contains(":")) { - return getByUsername(keyInst); - } - return super.get(key); - } - - /** - * Remove a credential with realm and name. - * - * @param realm The credential realm. - * @param name The username. - * @return The removed credential, or null if not found. 
- */ - public Password remove(String realm, String name) { - if(checkForWildcards()){ - throw new IllegalArgumentException("app context must be specified when removing a password."); - } - return super.remove(String.format("%s:%s:", realm, name)); - } - - @Override - public Password remove(String key) { - if(checkForWildcards()){ - throw new IllegalArgumentException("app context must be specified when removing a password."); - } - // Make it compatible with the old way (low-efficient) - if (!key.contains(":")) { - Password password = getByUsername(key); - validate(); - if (password == null) return null; - password.remove(); - // by invalidating any access to items will get refreshed - invalidate(); - return password; - } - return super.remove(key); - } - - @Override - public boolean containsKey(Object key) { - if (key instanceof String keyInst && !keyInst.contains(":")) { - return getByUsername(keyInst) != null; - } - return super.containsKey(key); - } - - private Password getByUsername(String name) { - for (Password password : this.values()) { - if (password.getUsername().equals(name)) return password; - } - return null; - } - - private boolean checkForWildcards(){ - boolean isWildCard = false; - if(("-").equals(service.getOwner()) || ("-").equals(service.getApp())){ - isWildCard = true; - } - return isWildCard; - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +/** + * The {@code PasswordCollection} class represents a collection of credentials. + */ +public class PasswordCollection extends EntityCollection<Password> { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + */ + PasswordCollection(Service service) { + super(service, service.passwordEndPoint, Password.class); + } + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + */ + PasswordCollection(Service service, Args args) { + super(service, service.passwordEndPoint, Password.class, args); + } + + /** + * Creates a credential with a username and password. + * + * @param name The username. + * @param password The password. + * + * @return The new credential. + */ + public Password create(String name, String password) { + if(checkForWildcards()){ + throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); + } + Args args = new Args("password", password); + return create(name, args); + } + + /** + * Creates a credential with a username, password, and realm. + * + * @param name The username. + * @param password The password. + * @param realm The credential realm. + * @return The new credential. + */ + public Password create(String name, String password, String realm) { + if(checkForWildcards()){ + throw new IllegalArgumentException("While creating StoragePasswords, namespace cannot have wildcards."); + } + Args args = new Args(); + args.put("password", password); + args.put("realm", realm); + return create(name, args); + } + + /** + * Get a credential with realm and name. + * + * @param realm The credential realm. + * @param name The username. + * @return The credential, or null if not found. 
+ */ + public Password get(String realm, String name) { + return super.get(String.format("%s:%s:", realm, name)); + } + + @Override + public Password get(Object key) { + // Make it compatible with the old way (low-efficient) + if (key instanceof String keyInst && !keyInst.contains(":")) { + return getByUsername(keyInst); + } + return super.get(key); + } + + /** + * Remove a credential with realm and name. + * + * @param realm The credential realm. + * @param name The username. + * @return The removed credential, or null if not found. + */ + public Password remove(String realm, String name) { + if(checkForWildcards()){ + throw new IllegalArgumentException("app context must be specified when removing a password."); + } + return super.remove(String.format("%s:%s:", realm, name)); + } + + @Override + public Password remove(String key) { + if(checkForWildcards()){ + throw new IllegalArgumentException("app context must be specified when removing a password."); + } + // Make it compatible with the old way (low-efficient) + if (!key.contains(":")) { + Password password = getByUsername(key); + validate(); + if (password == null) return null; + password.remove(); + // by invalidating any access to items will get refreshed + invalidate(); + return password; + } + return super.remove(key); + } + + @Override + public boolean containsKey(Object key) { + if (key instanceof String keyInst && !keyInst.contains(":")) { + return getByUsername(keyInst) != null; + } + return super.containsKey(key); + } + + private Password getByUsername(String name) { + for (Password password : this.values()) { + if (password.getUsername().equals(name)) return password; + } + return null; + } + + private boolean checkForWildcards(){ + boolean isWildCard = false; + if(("-").equals(service.getOwner()) || ("-").equals(service.getApp())){ + isWildCard = true; + } + return isWildCard; + } +} diff --git a/splunk/src/main/java/com/splunk/PivotSpecification.java 
b/splunk/src/main/java/com/splunk/PivotSpecification.java index c9ef4156..3e12ad03 100644 --- a/splunk/src/main/java/com/splunk/PivotSpecification.java +++ b/splunk/src/main/java/com/splunk/PivotSpecification.java @@ -1,491 +1,491 @@ -/* - * Copyright 2014 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ -package com.splunk; - -import com.google.gson.*; - -import java.util.*; - -/** - * PivotSpecification represents a pivot to be done on a particular data model object. The user creates a - * PivotSpecification on some data model object, adds filters, row splits, column splits, and cell values, - * then calls the pivot method to query splunkd and get a set of SPL queries corresponding to this specification. 
- */ -public class PivotSpecification { - private static GsonBuilder gson = new GsonBuilder(); - - private DataModelObject dataModelObject; - private String accelerationNamespace = null; - - private List<PivotColumnSplit> columns = new ArrayList<>(); - private List<PivotFilter> filters = new ArrayList<>(); - private List<PivotCellValue> cells = new ArrayList<>(); - private List<PivotRowSplit> rows = new ArrayList<>(); - - PivotSpecification(DataModelObject dataModelObject) { - this.dataModelObject = dataModelObject; - if (dataModelObject.getDataModel().isAccelerated()) { - this.accelerationNamespace = dataModelObject.getDataModel().getName(); - } else { - this.accelerationNamespace = null; - } - } - - /** - * Set the namespace to use for this acceleration, usually the name of a data model. A value of null will set no - * namespace for acceleration. - * - * @param namespace a string specifying a namespace. - * @return PivotSpecification instance - */ - public PivotSpecification setAccelerationNamespace(String namespace) { - this.accelerationNamespace = namespace; - return this; - } - - /** - * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a - * DataModelObject instance, as the acceleration cache for this pivot. - * - * @param sid the SID of a job. - * @return PivotSpecification instance - */ - public PivotSpecification setAccelerationJob(String sid) { - if (sid == null) { - throw new IllegalArgumentException("Sid to use for acceleration must not be null."); - } else { - this.accelerationNamespace = "sid=" + sid; - } - return this; - } - - /** - * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a - * DataModelObject instance, as the acceleration cache for this pivot. - * - * @param job a Job object. 
- * @return PivotSpecification instance - */ - public PivotSpecification setAccelerationJob(Job job) { - setAccelerationJob(job.getSid()); - return this; - } - - /** - * @return the acceleration namespace to use in this pivot. - */ - public String getAccelerationNamespace() { - return this.accelerationNamespace; - } - - private void assertCorrectlyTypedField(String fieldName, FieldType[] acceptableTypes) { - DataModelField field = this.dataModelObject.getField(fieldName); - if (field == null) { - throw new IllegalArgumentException("No such field named " + fieldName); - } else if (!Arrays.asList(acceptableTypes).contains(field.getType())) { - StringBuilder errorMessage = new StringBuilder(); - errorMessage.append("Expected a field of one of the following types: "); - boolean first = true; - for (FieldType t : acceptableTypes) { - if (!first) errorMessage.append(", "); - errorMessage.append(t.toString()); - first = false; - } - errorMessage.append("; found type " + field.getType().toString()); - throw new IllegalArgumentException(errorMessage.toString()); - } - } - - private void assertCorrectlyTypedField(String field, FieldType acceptableType) { - assertCorrectlyTypedField(field, new FieldType[] { acceptableType }); - } - - /** - * Add a filter on a boolean valued field. The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * is_remote = false - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param compareTo the value to compare the field to - * @return the PivotSpecification you are operating on. - */ - public PivotSpecification addFilter(String field, BooleanComparison comparison, boolean compareTo) { - assertCorrectlyTypedField(field, FieldType.BOOLEAN); - - BooleanPivotFilter filter = new BooleanPivotFilter(this.dataModelObject, field, comparison, compareTo); - filters.add(filter); - - return this; - } - - /** - * Add a filter on a string valued field. 
The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * host startswith 'boris' - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param comparisonValue the value to compare the field to - * @return the PivotSpecification you are operating on. - */ - public PivotSpecification addFilter(String field, StringComparison comparison, String comparisonValue) { - assertCorrectlyTypedField(field, FieldType.STRING); - - StringPivotFilter filter = new StringPivotFilter(this.dataModelObject, field, comparison, comparisonValue); - filters.add(filter); - - return this; - } - - /** - * Add a filter on an IPv4 valued field. The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * hostip = 192.168.100.12 - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param comparisonValue the value to compare the field to - * @return the PivotSpecification you are operating on. - */ - public PivotSpecification addFilter(String field, IPv4Comparison comparison, String comparisonValue) { - assertCorrectlyTypedField(field, FieldType.IPV4); - - IPv4PivotFilter filter = new IPv4PivotFilter(this.dataModelObject, field, comparison, comparisonValue); - filters.add(filter); - - return this; - } - - /** - * Add a filter on a numeric field. The filter will be a constraint of the form - * - * field `comparison` compareTo - * - * for example - * - * {@code height > 6} - * - * @param field the name of the field - * @param comparison a comparison operator for the filter - * @param comparisonValue the value to compare the field to - * @return the PivotSpecification you are operating on. 
- */ - public PivotSpecification addFilter(String field, NumberComparison comparison, double comparisonValue) { - assertCorrectlyTypedField(field, FieldType.NUMBER); - - NumberPivotFilter filter = new NumberPivotFilter(this.dataModelObject, field, comparison, comparisonValue); - filters.add(filter); - - return this; - } - - /** - * Add a filter that limits the number of values of an aggregated field that will be allowed - * into the pivot. - * - * @param field the name of a field - * @param sortAttribute field to aggregate for limiting - * @param sortDirection whether to take the lowest or highest values of the aggregated field - * @param limit how many values of the aggregated field to take - * @param statsFunction the function to use for aggregation - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addFilter(String field, String sortAttribute, - SortDirection sortDirection, int limit, StatsFunction statsFunction) { - if (!dataModelObject.containsField(field)) { - throw new IllegalArgumentException("No such field " + sortAttribute); - } - assertCorrectlyTypedField( - sortAttribute, - new FieldType[] { FieldType.STRING, FieldType.NUMBER, FieldType.OBJECTCOUNT } - ); - - LimitPivotFilter filter = new LimitPivotFilter(this.dataModelObject, field, sortAttribute, - sortDirection, limit, statsFunction); - filters.add(filter); - - return this; - } - - /** - * Add a row split on a numeric or string valued field, splitting on each distinct value of the field. - * - * @param field name of the field to split on - * @param label a human readable name for this set of rows - * @return The PivotSpecification you are modifying. 
- */ - public PivotSpecification addRowSplit(String field, String label) { - assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); - - FieldType t = this.dataModelObject.getField(field).getType(); - if (t == FieldType.NUMBER) { - rows.add(new NumberPivotRowSplit(this.dataModelObject, field, label)); - } else if (t == FieldType.STRING) { - rows.add(new StringPivotRowSplit(this.dataModelObject, field, label)); - } else { - throw new IllegalArgumentException("Field not of type number or string despite precondition asserting so."); - } - - return this; - } - - /** - * Add a row split on a numeric field, splitting into numeric ranges. - * - * This split generates bins with edges equivalent to the - * classic loop {@code 'for i in <start> to <end> by <step>' } but with a maximum - * number of bins {@code <limit> }. This dispatches to the stats and xyseries search commands. - * See their documentation for more details. - * - * - * @param field The field to split on - * @param label a human readable name for this set of rows - * @param start the value of the start of the first range, or null to take the lowest value in the events. - * @param end the value for the end of the last range, or null to take the highest value in the events. - * @param step the width of each range, or null to have Splunk calculate it. - * @param limit the maximum number of ranges to split into, or null for no limit. - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addRowSplit(String field, String label, Integer start, Integer end, - Integer step, Integer limit) { - assertCorrectlyTypedField(field, FieldType.NUMBER); - - PivotRowSplit split = new RangePivotRowSplit(this.dataModelObject, field, label, start, end, step, limit); - rows.add(split); - - return this; - } - - /** - * Add a row split on a boolean valued field. 
- * - * @param field String value - * @param label String value - * @param trueDisplayValue the string to display in the true valued row label. - * @param falseDisplayValue the string to display in the false valued row label; - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addRowSplit(String field, String label, - String trueDisplayValue, String falseDisplayValue) { - assertCorrectlyTypedField(field, FieldType.BOOLEAN); - - PivotRowSplit split = new BooleanPivotRowSplit(this.dataModelObject, field, label, - trueDisplayValue, falseDisplayValue); - rows.add(split); - - return this; - } - - /** - * Add a row split on a timestamp valued field, binned by the specified bucket size. - * - * @param field the name of the field to split on. - * @param label a human readable name for this set of rows - * @param binning the size of bins to use - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addRowSplit(String field, String label, TimestampBinning binning) { - assertCorrectlyTypedField(field, FieldType.TIMESTAMP); - - PivotRowSplit split = new TimestampPivotRowSplit(this.dataModelObject, field, label, binning); - rows.add(split); - - return this; - } - - /** - * Add a column split on a string or number valued field, producing a column for - * each distinct value of the field. - * - * @param field the field to split on. - * @return The PivotSpecification you are modifying. 
- */ - public PivotSpecification addColumnSplit(String field) { - assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); - - FieldType t = this.dataModelObject.getField(field).getType(); - - if (t == FieldType.NUMBER) { - columns.add(new NumericPivotColumnSplit(this.dataModelObject, field)); - } else if (t == FieldType.STRING) { - columns.add(new StringPivotColumnSplit(this.dataModelObject, field)); - } - - return this; - } - - /** - * Add a column split on a numeric field, splitting the values into ranges. - * - * @param field the field to split on. - * @param start the value of the start of the first range, or null to take the lowest value in the events. - * @param end the value for the end of the last range, or null to take the highest value in the events. - * @param step the width of each range, or null to have Splunk calculate it. - * @param limit the maximum number of ranges to split into, or null for no limit. - * @return The PivotSpecification you are modifying. - */ - public PivotSpecification addColumnSplit(String field, Integer start, Integer end, Integer step, Integer limit) { - assertCorrectlyTypedField(field, FieldType.NUMBER); - - PivotColumnSplit split = new RangePivotColumnSplit(this.dataModelObject, field, start, end, step, limit); - - columns.add(split); - return this; - } - - /** - * Add a column split on a boolean valued field. - * - * @param field the field to split on. - * @param trueDisplayValue the string to display in the true valued column label. - * @param falseDisplayValue the string to display in the false valued column label. - * @return the PivotSpecification you are working on. 
- */ - public PivotSpecification addColumnSplit(String field, String trueDisplayValue, String falseDisplayValue) { - assertCorrectlyTypedField(field, FieldType.BOOLEAN); - - PivotColumnSplit split = new BooleanPivotColumnSplit(this.dataModelObject, field, - trueDisplayValue, falseDisplayValue); - - columns.add(split); - return this; - } - - /** - * Add a column split on a timestamp valued field. - * - * @param field the field to split on. - * @param binning what time periods to use for binning valued of the field. - * @return the PivotSpecification you are working on. - */ - public PivotSpecification addColumnSplit(String field, TimestampBinning binning) { - assertCorrectlyTypedField(field, FieldType.TIMESTAMP); - - PivotColumnSplit split = new TimestampPivotColumnSplit(this.dataModelObject, field, binning); - - columns.add(split); - return this; - } - - /** - * Add an aggregate to each cell of the pivot. - * - * @param field the field to aggregate. - * @param label a human readable name for this aggregate. - * @param statsFunction the function to use for aggregation. - * @return the PivotSpecification you are working on. - */ - public PivotSpecification addCellValue(String field, String label, StatsFunction statsFunction) { - cells.add(new PivotCellValue(this.dataModelObject, field, label, statsFunction)); - - return this; - } - - /** - * @return a JSON serialization of this object. 
- */ - JsonObject toJson() { - JsonObject root = new JsonObject(); - - root.addProperty("dataModel", this.dataModelObject.getDataModel().getName()); - root.addProperty("baseClass", this.dataModelObject.getName()); - - JsonArray filterArray = new JsonArray(); - for (PivotFilter p : filters) { filterArray.add(p.toJson()); } - root.add("filters", filterArray); - - JsonArray rowsplitArray = new JsonArray(); - for (PivotRowSplit p : rows) { rowsplitArray.add(p.toJson()); } - root.add("rows", rowsplitArray); - - JsonArray cellvalueArray = new JsonArray(); - for (PivotCellValue p : cells) { cellvalueArray.add(p.toJson()); } - root.add("cells", cellvalueArray); - - JsonArray columnsplitArray = new JsonArray(); - for (PivotColumnSplit p : columns) { columnsplitArray.add(p.toJson()); } - root.add("columns", columnsplitArray); - - return root; - } - - /** - * @return a collection of all the filters added to this PivotSpecification. - */ - public Collection<PivotFilter> getFilters() { - return Collections.unmodifiableCollection(this.filters); - } - - /** - * @return a collection of all the row splits added to this PivotSpecification. - */ - public Collection<PivotRowSplit> getRowSplits() { - return Collections.unmodifiableCollection(this.rows); - } - - /** - * @return a collection of all the column splits added to this PivotSpecification. - */ - public Collection<PivotColumnSplit> getColumnSplits() { - return Collections.unmodifiableCollection(this.columns); - } - - /** - * @return a collection of all the cell values added to this PivotSpecification. - */ - public Collection<PivotCellValue> getCellValues() { - return Collections.unmodifiableCollection(this.cells); - } - - /** - * Query Splunk for SPL queries corresponding to this pivot. - * - * @return a Pivot object encapsulating the returned queries. 
- */ - public Pivot pivot() { - Service service = this.dataModelObject.getDataModel().getService(); - - Args args = new Args(); - args.put("pivot_json", toJson()); - if (this.accelerationNamespace != null) { - args.put("namespace", this.accelerationNamespace); - } - - ResponseMessage response = service.get( - "datamodel/pivot/" + this.dataModelObject.getDataModel().getName(), - args - ); - - if (response.getStatus() != 200) { - throw HttpException.create(response); - } else { - return Pivot.parseStream(service, response.getContent()); - } - } -} +/* + * Copyright 2014 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.splunk; + +import com.google.gson.*; + +import java.util.*; + +/** + * PivotSpecification represents a pivot to be done on a particular data model object. The user creates a + * PivotSpecification on some data model object, adds filters, row splits, column splits, and cell values, + * then calls the pivot method to query splunkd and get a set of SPL queries corresponding to this specification. 
+ */ +public class PivotSpecification { + private static GsonBuilder gson = new GsonBuilder(); + + private DataModelObject dataModelObject; + private String accelerationNamespace = null; + + private List<PivotColumnSplit> columns = new ArrayList<>(); + private List<PivotFilter> filters = new ArrayList<>(); + private List<PivotCellValue> cells = new ArrayList<>(); + private List<PivotRowSplit> rows = new ArrayList<>(); + + PivotSpecification(DataModelObject dataModelObject) { + this.dataModelObject = dataModelObject; + if (dataModelObject.getDataModel().isAccelerated()) { + this.accelerationNamespace = dataModelObject.getDataModel().getName(); + } else { + this.accelerationNamespace = null; + } + } + + /** + * Set the namespace to use for this acceleration, usually the name of a data model. A value of null will set no + * namespace for acceleration. + * + * @param namespace a string specifying a namespace. + * @return PivotSpecification instance + */ + public PivotSpecification setAccelerationNamespace(String namespace) { + this.accelerationNamespace = namespace; + return this; + } + + /** + * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a + * DataModelObject instance, as the acceleration cache for this pivot. + * + * @param sid the SID of a job. + * @return PivotSpecification instance + */ + public PivotSpecification setAccelerationJob(String sid) { + if (sid == null) { + throw new IllegalArgumentException("Sid to use for acceleration must not be null."); + } else { + this.accelerationNamespace = "sid=" + sid; + } + return this; + } + + /** + * Set a job with a query ending in tscollect, usually generated by createLocalAccelerationJob on a + * DataModelObject instance, as the acceleration cache for this pivot. + * + * @param job a Job object. 
+ * @return PivotSpecification instance + */ + public PivotSpecification setAccelerationJob(Job job) { + setAccelerationJob(job.getSid()); + return this; + } + + /** + * @return the acceleration namespace to use in this pivot. + */ + public String getAccelerationNamespace() { + return this.accelerationNamespace; + } + + private void assertCorrectlyTypedField(String fieldName, FieldType[] acceptableTypes) { + DataModelField field = this.dataModelObject.getField(fieldName); + if (field == null) { + throw new IllegalArgumentException("No such field named " + fieldName); + } else if (!Arrays.asList(acceptableTypes).contains(field.getType())) { + StringBuilder errorMessage = new StringBuilder(); + errorMessage.append("Expected a field of one of the following types: "); + boolean first = true; + for (FieldType t : acceptableTypes) { + if (!first) errorMessage.append(", "); + errorMessage.append(t.toString()); + first = false; + } + errorMessage.append("; found type " + field.getType().toString()); + throw new IllegalArgumentException(errorMessage.toString()); + } + } + + private void assertCorrectlyTypedField(String field, FieldType acceptableType) { + assertCorrectlyTypedField(field, new FieldType[] { acceptableType }); + } + + /** + * Add a filter on a boolean valued field. The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * is_remote = false + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param compareTo the value to compare the field to + * @return the PivotSpecification you are operating on. + */ + public PivotSpecification addFilter(String field, BooleanComparison comparison, boolean compareTo) { + assertCorrectlyTypedField(field, FieldType.BOOLEAN); + + BooleanPivotFilter filter = new BooleanPivotFilter(this.dataModelObject, field, comparison, compareTo); + filters.add(filter); + + return this; + } + + /** + * Add a filter on a string valued field. 
The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * host startswith 'boris' + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param comparisonValue the value to compare the field to + * @return the PivotSpecification you are operating on. + */ + public PivotSpecification addFilter(String field, StringComparison comparison, String comparisonValue) { + assertCorrectlyTypedField(field, FieldType.STRING); + + StringPivotFilter filter = new StringPivotFilter(this.dataModelObject, field, comparison, comparisonValue); + filters.add(filter); + + return this; + } + + /** + * Add a filter on an IPv4 valued field. The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * hostip = 192.168.100.12 + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param comparisonValue the value to compare the field to + * @return the PivotSpecification you are operating on. + */ + public PivotSpecification addFilter(String field, IPv4Comparison comparison, String comparisonValue) { + assertCorrectlyTypedField(field, FieldType.IPV4); + + IPv4PivotFilter filter = new IPv4PivotFilter(this.dataModelObject, field, comparison, comparisonValue); + filters.add(filter); + + return this; + } + + /** + * Add a filter on a numeric field. The filter will be a constraint of the form + * + * field `comparison` compareTo + * + * for example + * + * {@code height > 6} + * + * @param field the name of the field + * @param comparison a comparison operator for the filter + * @param comparisonValue the value to compare the field to + * @return the PivotSpecification you are operating on. 
+ */ + public PivotSpecification addFilter(String field, NumberComparison comparison, double comparisonValue) { + assertCorrectlyTypedField(field, FieldType.NUMBER); + + NumberPivotFilter filter = new NumberPivotFilter(this.dataModelObject, field, comparison, comparisonValue); + filters.add(filter); + + return this; + } + + /** + * Add a filter that limits the number of values of an aggregated field that will be allowed + * into the pivot. + * + * @param field the name of a field + * @param sortAttribute field to aggregate for limiting + * @param sortDirection whether to take the lowest or highest values of the aggregated field + * @param limit how many values of the aggregated field to take + * @param statsFunction the function to use for aggregation + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addFilter(String field, String sortAttribute, + SortDirection sortDirection, int limit, StatsFunction statsFunction) { + if (!dataModelObject.containsField(field)) { + throw new IllegalArgumentException("No such field " + sortAttribute); + } + assertCorrectlyTypedField( + sortAttribute, + new FieldType[] { FieldType.STRING, FieldType.NUMBER, FieldType.OBJECTCOUNT } + ); + + LimitPivotFilter filter = new LimitPivotFilter(this.dataModelObject, field, sortAttribute, + sortDirection, limit, statsFunction); + filters.add(filter); + + return this; + } + + /** + * Add a row split on a numeric or string valued field, splitting on each distinct value of the field. + * + * @param field name of the field to split on + * @param label a human readable name for this set of rows + * @return The PivotSpecification you are modifying. 
+ */ + public PivotSpecification addRowSplit(String field, String label) { + assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); + + FieldType t = this.dataModelObject.getField(field).getType(); + if (t == FieldType.NUMBER) { + rows.add(new NumberPivotRowSplit(this.dataModelObject, field, label)); + } else if (t == FieldType.STRING) { + rows.add(new StringPivotRowSplit(this.dataModelObject, field, label)); + } else { + throw new IllegalArgumentException("Field not of type number or string despite precondition asserting so."); + } + + return this; + } + + /** + * Add a row split on a numeric field, splitting into numeric ranges. + * + * This split generates bins with edges equivalent to the + * classic loop {@code 'for i in <start> to <end> by <step>' } but with a maximum + * number of bins {@code <limit> }. This dispatches to the stats and xyseries search commands. + * See their documentation for more details. + * + * + * @param field The field to split on + * @param label a human readable name for this set of rows + * @param start the value of the start of the first range, or null to take the lowest value in the events. + * @param end the value for the end of the last range, or null to take the highest value in the events. + * @param step the width of each range, or null to have Splunk calculate it. + * @param limit the maximum number of ranges to split into, or null for no limit. + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addRowSplit(String field, String label, Integer start, Integer end, + Integer step, Integer limit) { + assertCorrectlyTypedField(field, FieldType.NUMBER); + + PivotRowSplit split = new RangePivotRowSplit(this.dataModelObject, field, label, start, end, step, limit); + rows.add(split); + + return this; + } + + /** + * Add a row split on a boolean valued field. 
+ * + * @param field String value + * @param label String value + * @param trueDisplayValue the string to display in the true valued row label. + * @param falseDisplayValue the string to display in the false valued row label; + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addRowSplit(String field, String label, + String trueDisplayValue, String falseDisplayValue) { + assertCorrectlyTypedField(field, FieldType.BOOLEAN); + + PivotRowSplit split = new BooleanPivotRowSplit(this.dataModelObject, field, label, + trueDisplayValue, falseDisplayValue); + rows.add(split); + + return this; + } + + /** + * Add a row split on a timestamp valued field, binned by the specified bucket size. + * + * @param field the name of the field to split on. + * @param label a human readable name for this set of rows + * @param binning the size of bins to use + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addRowSplit(String field, String label, TimestampBinning binning) { + assertCorrectlyTypedField(field, FieldType.TIMESTAMP); + + PivotRowSplit split = new TimestampPivotRowSplit(this.dataModelObject, field, label, binning); + rows.add(split); + + return this; + } + + /** + * Add a column split on a string or number valued field, producing a column for + * each distinct value of the field. + * + * @param field the field to split on. + * @return The PivotSpecification you are modifying. 
+ */ + public PivotSpecification addColumnSplit(String field) { + assertCorrectlyTypedField(field, new FieldType[] { FieldType.NUMBER, FieldType.STRING }); + + FieldType t = this.dataModelObject.getField(field).getType(); + + if (t == FieldType.NUMBER) { + columns.add(new NumericPivotColumnSplit(this.dataModelObject, field)); + } else if (t == FieldType.STRING) { + columns.add(new StringPivotColumnSplit(this.dataModelObject, field)); + } + + return this; + } + + /** + * Add a column split on a numeric field, splitting the values into ranges. + * + * @param field the field to split on. + * @param start the value of the start of the first range, or null to take the lowest value in the events. + * @param end the value for the end of the last range, or null to take the highest value in the events. + * @param step the width of each range, or null to have Splunk calculate it. + * @param limit the maximum number of ranges to split into, or null for no limit. + * @return The PivotSpecification you are modifying. + */ + public PivotSpecification addColumnSplit(String field, Integer start, Integer end, Integer step, Integer limit) { + assertCorrectlyTypedField(field, FieldType.NUMBER); + + PivotColumnSplit split = new RangePivotColumnSplit(this.dataModelObject, field, start, end, step, limit); + + columns.add(split); + return this; + } + + /** + * Add a column split on a boolean valued field. + * + * @param field the field to split on. + * @param trueDisplayValue the string to display in the true valued column label. + * @param falseDisplayValue the string to display in the false valued column label. + * @return the PivotSpecification you are working on. 
+ */ + public PivotSpecification addColumnSplit(String field, String trueDisplayValue, String falseDisplayValue) { + assertCorrectlyTypedField(field, FieldType.BOOLEAN); + + PivotColumnSplit split = new BooleanPivotColumnSplit(this.dataModelObject, field, + trueDisplayValue, falseDisplayValue); + + columns.add(split); + return this; + } + + /** + * Add a column split on a timestamp valued field. + * + * @param field the field to split on. + * @param binning what time periods to use for binning values of the field. + * @return the PivotSpecification you are working on. + */ + public PivotSpecification addColumnSplit(String field, TimestampBinning binning) { + assertCorrectlyTypedField(field, FieldType.TIMESTAMP); + + PivotColumnSplit split = new TimestampPivotColumnSplit(this.dataModelObject, field, binning); + + columns.add(split); + return this; + } + + /** + * Add an aggregate to each cell of the pivot. + * + * @param field the field to aggregate. + * @param label a human readable name for this aggregate. + * @param statsFunction the function to use for aggregation. + * @return the PivotSpecification you are working on. + */ + public PivotSpecification addCellValue(String field, String label, StatsFunction statsFunction) { + cells.add(new PivotCellValue(this.dataModelObject, field, label, statsFunction)); + + return this; + } + + /** + * @return a JSON serialization of this object. 
+ */ + JsonObject toJson() { + JsonObject root = new JsonObject(); + + root.addProperty("dataModel", this.dataModelObject.getDataModel().getName()); + root.addProperty("baseClass", this.dataModelObject.getName()); + + JsonArray filterArray = new JsonArray(); + for (PivotFilter p : filters) { filterArray.add(p.toJson()); } + root.add("filters", filterArray); + + JsonArray rowsplitArray = new JsonArray(); + for (PivotRowSplit p : rows) { rowsplitArray.add(p.toJson()); } + root.add("rows", rowsplitArray); + + JsonArray cellvalueArray = new JsonArray(); + for (PivotCellValue p : cells) { cellvalueArray.add(p.toJson()); } + root.add("cells", cellvalueArray); + + JsonArray columnsplitArray = new JsonArray(); + for (PivotColumnSplit p : columns) { columnsplitArray.add(p.toJson()); } + root.add("columns", columnsplitArray); + + return root; + } + + /** + * @return a collection of all the filters added to this PivotSpecification. + */ + public Collection<PivotFilter> getFilters() { + return Collections.unmodifiableCollection(this.filters); + } + + /** + * @return a collection of all the row splits added to this PivotSpecification. + */ + public Collection<PivotRowSplit> getRowSplits() { + return Collections.unmodifiableCollection(this.rows); + } + + /** + * @return a collection of all the column splits added to this PivotSpecification. + */ + public Collection<PivotColumnSplit> getColumnSplits() { + return Collections.unmodifiableCollection(this.columns); + } + + /** + * @return a collection of all the cell values added to this PivotSpecification. + */ + public Collection<PivotCellValue> getCellValues() { + return Collections.unmodifiableCollection(this.cells); + } + + /** + * Query Splunk for SPL queries corresponding to this pivot. + * + * @return a Pivot object encapsulating the returned queries. 
+ */ + public Pivot pivot() { + Service service = this.dataModelObject.getDataModel().getService(); + + Args args = new Args(); + args.put("pivot_json", toJson()); + if (this.accelerationNamespace != null) { + args.put("namespace", this.accelerationNamespace); + } + + ResponseMessage response = service.get( + "datamodel/pivot/" + this.dataModelObject.getDataModel().getName(), + args + ); + + if (response.getStatus() != 200) { + throw HttpException.create(response); + } else { + return Pivot.parseStream(service, response.getContent()); + } + } +} diff --git a/splunk/src/main/java/com/splunk/RequestMessage.java b/splunk/src/main/java/com/splunk/RequestMessage.java index 27dda740..a44ceebf 100644 --- a/splunk/src/main/java/com/splunk/RequestMessage.java +++ b/splunk/src/main/java/com/splunk/RequestMessage.java @@ -1,114 +1,114 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.OutputStream; -import java.util.Map; -import java.util.TreeMap; - -/** - * The {@code RequestMessage} class represents an HTTP request message including - * method, headers, and body content. - */ -public class RequestMessage { - String method = "GET"; // "GET" | "PUT" | "POST" | "DELETE" - Map<String, String> header = null; - Object content = null; - - /** Creates a new {@code RequestMessage} instance. 
*/ - public RequestMessage() {} - - /** - * Creates a new {@code RequestMessage} instance with a given method - * - * @param method String value - */ - public RequestMessage(String method) { - this.method = method; - } - - /** - * Indicates whether the given value is a supported HTTP method. - * - * @param value The value to check. - * @return {@code true} if the value is a supported method, - * {@code false} if not. - */ - boolean checkMethod(String value) { - return - value.equalsIgnoreCase("GET") || - value.equalsIgnoreCase("PUT") || - value.equalsIgnoreCase("POST") || - value.equalsIgnoreCase("DELETE"); - } - - /** - * Returns a map of message headers. - * - * @return A {@code Map} of message headers. - */ - public Map<String, String> getHeader() { - if (this.header == null) - this.header = new TreeMap<>( - String.CASE_INSENSITIVE_ORDER); - return this.header; - } - - /** - * Returns the message's HTTP method. - * - * @return The HTTP method. - */ - public String getMethod() { - return this.method; - } - - /** - * Sets the message's HTTP method. - * - * @param value The HTTP method. - */ - public void setMethod(String value) { - value = value.toUpperCase(); - if (!checkMethod(value)) - throw new IllegalArgumentException(); - this.method = value; - } - - /** - * Returns the message body content. - * - * @return The message content. - */ - public Object getContent() { - return this.content; - } - - /** - * Sets the message body content. - * - * @param value The message content. - */ - public void setContent(String value) { - this.content = value; - } - - public void setContent(OutputStream value) { - this.content = value; - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.OutputStream; +import java.util.Map; +import java.util.TreeMap; + +/** + * The {@code RequestMessage} class represents an HTTP request message including + * method, headers, and body content. + */ +public class RequestMessage { + String method = "GET"; // "GET" | "PUT" | "POST" | "DELETE" + Map<String, String> header = null; + Object content = null; + + /** Creates a new {@code RequestMessage} instance. */ + public RequestMessage() {} + + /** + * Creates a new {@code RequestMessage} instance with a given method + * + * @param method String value + */ + public RequestMessage(String method) { + this.method = method; + } + + /** + * Indicates whether the given value is a supported HTTP method. + * + * @param value The value to check. + * @return {@code true} if the value is a supported method, + * {@code false} if not. + */ + boolean checkMethod(String value) { + return + value.equalsIgnoreCase("GET") || + value.equalsIgnoreCase("PUT") || + value.equalsIgnoreCase("POST") || + value.equalsIgnoreCase("DELETE"); + } + + /** + * Returns a map of message headers. + * + * @return A {@code Map} of message headers. + */ + public Map<String, String> getHeader() { + if (this.header == null) + this.header = new TreeMap<>( + String.CASE_INSENSITIVE_ORDER); + return this.header; + } + + /** + * Returns the message's HTTP method. + * + * @return The HTTP method. + */ + public String getMethod() { + return this.method; + } + + /** + * Sets the message's HTTP method. + * + * @param value The HTTP method. 
+ */ + public void setMethod(String value) { + value = value.toUpperCase(); + if (!checkMethod(value)) + throw new IllegalArgumentException(); + this.method = value; + } + + /** + * Returns the message body content. + * + * @return The message content. + */ + public Object getContent() { + return this.content; + } + + /** + * Sets the message body content. + * + * @param value The message content. + */ + public void setContent(String value) { + this.content = value; + } + + public void setContent(OutputStream value) { + this.content = value; + } +} + diff --git a/splunk/src/main/java/com/splunk/Resource.java b/splunk/src/main/java/com/splunk/Resource.java index 4dc43e27..1335a728 100644 --- a/splunk/src/main/java/com/splunk/Resource.java +++ b/splunk/src/main/java/com/splunk/Resource.java @@ -66,7 +66,7 @@ public abstract class Resource { this.service = service; this.path = service.fullpath( - path, namespace.size() == 0 ? null : namespace); + path, namespace.isEmpty() ? null : namespace); this.refreshArgs = args; } diff --git a/splunk/src/main/java/com/splunk/ResourceCollection.java b/splunk/src/main/java/com/splunk/ResourceCollection.java index b1f076f9..94f2b006 100644 --- a/splunk/src/main/java/com/splunk/ResourceCollection.java +++ b/splunk/src/main/java/com/splunk/ResourceCollection.java @@ -1,386 +1,386 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.util.*; - -/** - * The {@code ResourceCollection} abstract base class represents a collection of - * Splunk resources. - * - * @param <T> The type of members of the collection. - */ -public class ResourceCollection<T extends Resource> - extends Resource implements Map<String, T> -{ - protected LinkedHashMap<String, LinkedList<T>> - items = new LinkedHashMap<>(); - protected Class itemClass; - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The target endpoint. - * @param itemClass The class of this resource item. - */ - ResourceCollection(Service service, String path, Class itemClass) { - super(service, path); - this.itemClass = itemClass; - } - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The target endpoint. - * @param itemClass The class of this resource item. - * @param args Collection arguments that specify the number of entities to - * return and how to sort them (see {@link CollectionArgs}). - */ - ResourceCollection( - Service service, String path, Class itemClass, Args args) { - super(service, path, args); - this.itemClass = itemClass; - } - - /** {@inheritDoc} */ - public void clear() { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public boolean containsKey(Object key) { - return validate().items.containsKey(key); - } - - /** - * Determines whether a scoped, namespace-constrained key - * exists within this collection. - * - * @param key The key to look up. - * @param namespace The namespace to constrain the search to. - * @return {@code true} if the key exists, {@code false} if not. 
- */ - public boolean containsKey(Object key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - validate(); - - LinkedList<T> entities = items.get(key); - if (entities == null || entities.size() == 0) return false; - String pathMatcher = service.fullpath("", namespace); - for (T entity: entities) { - if (entity.path.startsWith(pathMatcher)) { - return true; - } - } - return false; - } - - /** {@inheritDoc} */ - public boolean containsValue(Object value) { - // value should be a non-linked-list value; values are stored as linked - // lists inside our container. - LinkedList<Object> linkedList = new LinkedList<>(); - linkedList.add(value); - return validate().items.containsValue(linkedList); - } - - static Class[] itemSig = new Class[] { Service.class, String.class }; - - /** - * Creates a collection member. - * - * @param itemClass The class of the member to create. - * @param path The path to the member resource. - * @param namespace The namespace. - * @return The new member. - */ - protected T createItem(Class itemClass, String path, Args namespace) { - Constructor constructor; - try { - constructor = itemClass.getDeclaredConstructor(itemSig); - } - catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - - T item; - try { - while (true) { - Object obj = constructor.newInstance(service, service.fullpath(path, namespace)); - //if (obj instanceof Message) { // We ignore messages sent back inline. - // continue; - //} else { - item = (T)obj; - break; - //} - } - } - catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - catch (InvocationTargetException e) { - throw new RuntimeException(e.getTargetException()); - } - catch (InstantiationException e) { - throw new RuntimeException(e); - } - - return item; - } - - /** - * Creates a collection member corresponding to a given - * Atom entry. This base implementation uses the class object that was - * passed in when the generic {@code ResourceCollection} was created. 
- * Subclasses may override this method to provide alternative means of - * instantiating collection members. - * - * @param entry The {@code AtomEntry} corresponding to the member to - * instantiate. - * @return The new member. - */ - protected T createItem(AtomEntry entry) { - return createItem(itemClass, itemPath(entry), namespace(entry)); - } - - /** {@inheritDoc} */ - public Set<Map.Entry<String, T>> entrySet() { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - @Override public boolean equals(Object o) { - return validate().items.equals(o); - } - - /** - * Gets the value of a given key, if it exists within this collection. - * - * @param key The key to look up. - * @return The value indexed by the key, or {@code null} if it doesn't - * exist. - * @throws SplunkException The exception to throw if there is more than one - * value represented by this key. - */ - public T get(Object key) { - validate(); - LinkedList<T> entities = items.get(key); - if (entities != null && entities.size() > 1) { - throw new SplunkException(SplunkException.AMBIGUOUS, - "Key has multiple values, specify a namespace"); - } - if (entities == null || entities.size() == 0) return null; - return entities.get(0); - } - - /** - * Gets a the value of a scoped, namespace-constrained key, if it exists - * within this collection. - * - * @param key The key to look up. - * @param namespace The namespace to constrain the search to. - * @return The value indexed by the key, or {@code null} if it doesn't - * exist. 
- */ - public T get(Object key, Args namespace) { - Util.ensureNamespaceIsExact(namespace); - validate(); - - LinkedList<T> entities = items.get(key); - if (entities == null || entities.size() == 0) return null; - String pathMatcher = service.fullpath("", namespace); - for (T entity: entities) { - if (entity.path.startsWith(pathMatcher)) { - return entity; - } - } - return null; - } - - @Override public int hashCode() { - return validate().items.hashCode(); - } - - /** {@inheritDoc} */ - public boolean isEmpty() { - return validate().items.isEmpty(); - } - - /** - * Returns the value to use as the key from a given Atom entry. - * Subclasses may override this value for collections that use something - * other than "title" as the key. - * - * @param entry The {@code AtomEntry} corresponding to the collection - * member. - * @return The value to use as the member's key. - */ - protected String itemKey(AtomEntry entry) { - return entry.title; - } - - /** - * Returns the value to use as the member's path from a given Atom entry. - * Subclasses may override this value to support alternative methods of - * determining a member's path. - * - * @param entry The {@code AtomEntry} corresponding to the collection - * member. - * @return The value to use as the member's path. - */ - protected String itemPath(AtomEntry entry) { - return entry.links.get("alternate"); - } - - private Args namespace(AtomEntry entry) { - Args namespace = new Args(); - - // no content? return an empty namespace. 
- if (entry.content == null) - return namespace; - - HashMap<String, String> entityMetadata = - (HashMap<String, String>)entry.content.get("eai:acl"); - - // If there is no ACL info, we just create an empty map - if (entityMetadata == null) { - entityMetadata = new HashMap<>(); - } - - if (entityMetadata.containsKey("owner")) - namespace.put("owner", entityMetadata.get("owner")); - if (entityMetadata.containsKey("app")) - namespace.put("app", entityMetadata.get("app")); - if (entityMetadata.containsKey("sharing")) - namespace.put("sharing", entityMetadata.get("sharing")); - return namespace; - } - - /** {@inheritDoc} */ - public Set<String> keySet() { - return validate().items.keySet(); - } - - /** - * Issues an HTTP request to list the contents of the collection resource. - * - * @return The list response message. - */ - public ResponseMessage list() { - return service.get(path, this.refreshArgs); - } - - /** - * Loads the collection resource from a given Atom feed. - * - * @param value The {@code AtomFeed} instance to load the collection from. - * @return The current {@code ResourceCollection} instance. - */ - ResourceCollection<T> load(AtomFeed value) { - super.load(value); - for (AtomEntry entry : value.entries) { - String key = itemKey(entry); - T item = createItem(entry); - if (items.containsKey(key)) { - LinkedList<T> list = items.get(key); - list.add(item); - } else { - LinkedList<T> list = new LinkedList<>(); - list.add(item); - items.put(key, list); - } - } - return this; - } - - /** {@inheritDoc} */ - public T put(String key, T value) { - throw new UnsupportedOperationException(); - } - - /** - * Copies all mappings from a given map to this map (unsupported). - * - * @param map The set of mappings to copy into this map. - */ - public void putAll(Map<? extends String, ? 
extends T> map) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - @Override public ResourceCollection refresh() { - items.clear(); - ResponseMessage response = list(); - assert(response.getStatus() == 200); - - AtomFeed feed = null; - try { - feed = AtomFeed.parseStream(response.getContent()); - } catch (Exception e) { - throw new RuntimeException(e); - } - load(feed); - return this; - } - - /** {@inheritDoc} */ - public T remove(Object key) { - throw new UnsupportedOperationException(); - } - - /** {@inheritDoc} */ - public int size() { - return validate().items.size(); - } - - /** {@inheritDoc} */ - @Override public ResourceCollection<T> validate() { - super.validate(); - return this; - } - - /** {@inheritDoc} */ - public Collection<T> values() { - LinkedList<T> collection = new LinkedList<>(); - validate(); - Set<String> keySet = items.keySet(); - for (String key: keySet) { - LinkedList<T> list = items.get(key); - for (T item: list) { - collection.add(item); - } - } - return collection; - } - - /** - * Returns the number of values that a specific key represents. - * - * @param key The key to look up. - * @return The number of entity values represented by the key. - */ - public int valueSize(Object key) { - validate(); - LinkedList<T> entities = items.get(key); - if (entities == null || entities.size() == 0) return 0; - return entities.size(); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.*; + +/** + * The {@code ResourceCollection} abstract base class represents a collection of + * Splunk resources. + * + * @param <T> The type of members of the collection. + */ +public class ResourceCollection<T extends Resource> + extends Resource implements Map<String, T> +{ + protected LinkedHashMap<String, LinkedList<T>> + items = new LinkedHashMap<>(); + protected Class itemClass; + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The target endpoint. + * @param itemClass The class of this resource item. + */ + ResourceCollection(Service service, String path, Class itemClass) { + super(service, path); + this.itemClass = itemClass; + } + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The target endpoint. + * @param itemClass The class of this resource item. + * @param args Collection arguments that specify the number of entities to + * return and how to sort them (see {@link CollectionArgs}). + */ + ResourceCollection( + Service service, String path, Class itemClass, Args args) { + super(service, path, args); + this.itemClass = itemClass; + } + + /** {@inheritDoc} */ + public void clear() { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public boolean containsKey(Object key) { + return validate().items.containsKey(key); + } + + /** + * Determines whether a scoped, namespace-constrained key + * exists within this collection. + * + * @param key The key to look up. + * @param namespace The namespace to constrain the search to. + * @return {@code true} if the key exists, {@code false} if not. 
+ */ + public boolean containsKey(Object key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + validate(); + + LinkedList<T> entities = items.get(key); + if (entities == null || entities.isEmpty()) return false; + String pathMatcher = service.fullpath("", namespace); + for (T entity: entities) { + if (entity.path.startsWith(pathMatcher)) { + return true; + } + } + return false; + } + + /** {@inheritDoc} */ + public boolean containsValue(Object value) { + // value should be a non-linked-list value; values are stored as linked + // lists inside our container. + LinkedList<Object> linkedList = new LinkedList<>(); + linkedList.add(value); + return validate().items.containsValue(linkedList); + } + + static Class[] itemSig = new Class[] { Service.class, String.class }; + + /** + * Creates a collection member. + * + * @param itemClass The class of the member to create. + * @param path The path to the member resource. + * @param namespace The namespace. + * @return The new member. + */ + protected T createItem(Class itemClass, String path, Args namespace) { + Constructor constructor; + try { + constructor = itemClass.getDeclaredConstructor(itemSig); + } + catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } + + T item; + try { + while (true) { + Object obj = constructor.newInstance(service, service.fullpath(path, namespace)); + //if (obj instanceof Message) { // We ignore messages sent back inline. + // continue; + //} else { + item = (T)obj; + break; + //} + } + } + catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + catch (InvocationTargetException e) { + throw new RuntimeException(e.getTargetException()); + } + catch (InstantiationException e) { + throw new RuntimeException(e); + } + + return item; + } + + /** + * Creates a collection member corresponding to a given + * Atom entry. This base implementation uses the class object that was + * passed in when the generic {@code ResourceCollection} was created. 
+ * Subclasses may override this method to provide alternative means of + * instantiating collection members. + * + * @param entry The {@code AtomEntry} corresponding to the member to + * instantiate. + * @return The new member. + */ + protected T createItem(AtomEntry entry) { + return createItem(itemClass, itemPath(entry), namespace(entry)); + } + + /** {@inheritDoc} */ + public Set<Entry<String, T>> entrySet() { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + @Override public boolean equals(Object o) { + return validate().items.equals(o); + } + + /** + * Gets the value of a given key, if it exists within this collection. + * + * @param key The key to look up. + * @return The value indexed by the key, or {@code null} if it doesn't + * exist. + * @throws SplunkException The exception to throw if there is more than one + * value represented by this key. + */ + public T get(Object key) { + validate(); + LinkedList<T> entities = items.get(key); + if (entities != null && entities.size() > 1) { + throw new SplunkException(SplunkException.AMBIGUOUS, + "Key has multiple values, specify a namespace"); + } + if (entities == null || entities.isEmpty()) return null; + return entities.get(0); + } + + /** + * Gets a the value of a scoped, namespace-constrained key, if it exists + * within this collection. + * + * @param key The key to look up. + * @param namespace The namespace to constrain the search to. + * @return The value indexed by the key, or {@code null} if it doesn't + * exist. 
+ */ + public T get(Object key, Args namespace) { + Util.ensureNamespaceIsExact(namespace); + validate(); + + LinkedList<T> entities = items.get(key); + if (entities == null || entities.isEmpty()) return null; + String pathMatcher = service.fullpath("", namespace); + for (T entity: entities) { + if (entity.path.startsWith(pathMatcher)) { + return entity; + } + } + return null; + } + + @Override public int hashCode() { + return validate().items.hashCode(); + } + + /** {@inheritDoc} */ + public boolean isEmpty() { + return validate().items.isEmpty(); + } + + /** + * Returns the value to use as the key from a given Atom entry. + * Subclasses may override this value for collections that use something + * other than "title" as the key. + * + * @param entry The {@code AtomEntry} corresponding to the collection + * member. + * @return The value to use as the member's key. + */ + protected String itemKey(AtomEntry entry) { + return entry.title; + } + + /** + * Returns the value to use as the member's path from a given Atom entry. + * Subclasses may override this value to support alternative methods of + * determining a member's path. + * + * @param entry The {@code AtomEntry} corresponding to the collection + * member. + * @return The value to use as the member's path. + */ + protected String itemPath(AtomEntry entry) { + return entry.links.get("alternate"); + } + + private Args namespace(AtomEntry entry) { + Args namespace = new Args(); + + // no content? return an empty namespace. 
+ if (entry.content == null) + return namespace; + + HashMap<String, String> entityMetadata = + (HashMap<String, String>)entry.content.get("eai:acl"); + + // If there is no ACL info, we just create an empty map + if (entityMetadata == null) { + entityMetadata = new HashMap<>(); + } + + if (entityMetadata.containsKey("owner")) + namespace.put("owner", entityMetadata.get("owner")); + if (entityMetadata.containsKey("app")) + namespace.put("app", entityMetadata.get("app")); + if (entityMetadata.containsKey("sharing")) + namespace.put("sharing", entityMetadata.get("sharing")); + return namespace; + } + + /** {@inheritDoc} */ + public Set<String> keySet() { + return validate().items.keySet(); + } + + /** + * Issues an HTTP request to list the contents of the collection resource. + * + * @return The list response message. + */ + public ResponseMessage list() { + return service.get(path, this.refreshArgs); + } + + /** + * Loads the collection resource from a given Atom feed. + * + * @param value The {@code AtomFeed} instance to load the collection from. + * @return The current {@code ResourceCollection} instance. + */ + ResourceCollection<T> load(AtomFeed value) { + super.load(value); + for (AtomEntry entry : value.entries) { + String key = itemKey(entry); + T item = createItem(entry); + if (items.containsKey(key)) { + LinkedList<T> list = items.get(key); + list.add(item); + } else { + LinkedList<T> list = new LinkedList<>(); + list.add(item); + items.put(key, list); + } + } + return this; + } + + /** {@inheritDoc} */ + public T put(String key, T value) { + throw new UnsupportedOperationException(); + } + + /** + * Copies all mappings from a given map to this map (unsupported). + * + * @param map The set of mappings to copy into this map. + */ + public void putAll(Map<? extends String, ? 
extends T> map) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + @Override public ResourceCollection refresh() { + items.clear(); + ResponseMessage response = list(); + assert(response.getStatus() == 200); + + AtomFeed feed = null; + try { + feed = AtomFeed.parseStream(response.getContent()); + } catch (Exception e) { + throw new RuntimeException(e); + } + load(feed); + return this; + } + + /** {@inheritDoc} */ + public T remove(Object key) { + throw new UnsupportedOperationException(); + } + + /** {@inheritDoc} */ + public int size() { + return validate().items.size(); + } + + /** {@inheritDoc} */ + @Override public ResourceCollection<T> validate() { + super.validate(); + return this; + } + + /** {@inheritDoc} */ + public Collection<T> values() { + LinkedList<T> collection = new LinkedList<>(); + validate(); + Set<String> keySet = items.keySet(); + for (String key: keySet) { + LinkedList<T> list = items.get(key); + for (T item: list) { + collection.add(item); + } + } + return collection; + } + + /** + * Returns the number of values that a specific key represents. + * + * @param key The key to look up. + * @return The number of entity values represented by the key. + */ + public int valueSize(Object key) { + validate(); + LinkedList<T> entities = items.get(key); + if (entities == null || entities.isEmpty()) return 0; + return entities.size(); + } +} diff --git a/splunk/src/main/java/com/splunk/ResponseMessage.java b/splunk/src/main/java/com/splunk/ResponseMessage.java index 24089081..911b15d6 100644 --- a/splunk/src/main/java/com/splunk/ResponseMessage.java +++ b/splunk/src/main/java/com/splunk/ResponseMessage.java @@ -1,80 +1,80 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.InputStream; -import java.util.Map; -import java.util.TreeMap; - -/** - * The {@code ResponseMessage} class represents an HTTP response message that - * includes status codes, response headers, and body content. - */ -public class ResponseMessage { - int status; - Map<String, String> header = null; - InputStream content; - - /** - * Default class constructor. - */ - ResponseMessage() {} - - /** - * Class constructor. - * - * @param status The initial status. - */ - ResponseMessage(int status) { - this.status = status; - } - - ResponseMessage(int status, InputStream content) { - this.status = status; - this.content = content; - } - - /** - * Returns the body content stream. - * - * @return The content stream. - */ - public InputStream getContent() { - return this.content; - } - - /** - * Returns the response headers. - * - * @return Response headers. - */ - public Map<String, String> getHeader() { - if (this.header == null) - this.header = - new TreeMap<>(String.CASE_INSENSITIVE_ORDER); - return this.header; - } - - /** - * Returns the response status. - * - * @return The response status. - */ - public int getStatus() { - return this.status; - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.InputStream; +import java.util.Map; +import java.util.TreeMap; + +/** + * The {@code ResponseMessage} class represents an HTTP response message that + * includes status codes, response headers, and body content. + */ +public class ResponseMessage { + int status; + Map<String, String> header = null; + InputStream content; + + /** + * Default class constructor. + */ + ResponseMessage() {} + + /** + * Class constructor. + * + * @param status The initial status. + */ + ResponseMessage(int status) { + this.status = status; + } + + ResponseMessage(int status, InputStream content) { + this.status = status; + this.content = content; + } + + /** + * Returns the body content stream. + * + * @return The content stream. + */ + public InputStream getContent() { + return this.content; + } + + /** + * Returns the response headers. + * + * @return Response headers. + */ + public Map<String, String> getHeader() { + if (this.header == null) + this.header = + new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + return this.header; + } + + /** + * Returns the response status. + * + * @return The response status. + */ + public int getStatus() { + return this.status; + } +} diff --git a/splunk/src/main/java/com/splunk/ResultsReader.java b/splunk/src/main/java/com/splunk/ResultsReader.java index 3ccaee6c..d2c60777 100644 --- a/splunk/src/main/java/com/splunk/ResultsReader.java +++ b/splunk/src/main/java/com/splunk/ResultsReader.java @@ -1,178 +1,178 @@ -/* - * Copyright 2012 Splunk, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.Iterator; - -/** - * The {@code ResultsReader} class is a base class for the streaming readers - * for Splunk search results. This class should not be used to retrieve preview - * results for an export search. - */ -public abstract class ResultsReader - extends StreamIterableBase<Event> - implements SearchResults { - protected final InputStream inputStream; - // Default should be false which will result in no result set skipping. - boolean isPreview; - boolean isExportStream; - private boolean isInMultiReader; - - ResultsReader(InputStream inputStream, boolean isInMultiReader) - throws IOException { - this.inputStream = inputStream; - isExportStream = inputStream instanceof ExportResultsStream; - this.isInMultiReader = isInMultiReader; - } - - /** - * Closes the reader and returns resources. - * - * @throws IOException On IO exception. - */ - public void close() throws IOException { - inputStream.close(); - } - - /** - * Returns the next event in the event stream. - * - * @return The map of key-value pairs for an event. - * The format of multi-item values is implementation-specific. - * We recommend using the methods from the - * {@link Event} class to interpret multi-item values. - * @throws IOException On IO exception. 
- */ - final public Event getNextEvent() throws IOException { - return getNextElement(); - } - - /** - * Returns an iterator over the events from this reader. - * @return an Iterator. - */ - @Override - public final Iterator<Event> iterator() { - return super.iterator(); - } - - /** - * Returns the next event while moving to the next set - * automatically when needed, such as concatenating final results - * across multiple sets. - * - * @return null {@code null} if the end is reached. - * @throws IOException On IO exception. - */ - final Event getNextElement() throws IOException { - Event event; - while (true) { - event = getNextEventInCurrentSet(); - - // If we actually managed to get an event, then we break and return it - if (event != null) - break; - - // We don't concatenate across previews across sets, since each set - // might be a snapshot at a given time or a summary result with - // partial data from a reporting search - // (for example "count by host"). So if this is a preview, - // break. Null return indicating the end of the set. - if (isPreview) - break; - - // If we did not advance to next set, i.e. the end of stream is - // reached, break. Null return indicating the end of the set. - if (!advanceStreamToNextSet()) - break; - - // We have advanced to the next set. isPreview is for that set. - // It should not be a preview. Splunk should never return a preview - // after final results which we might have concatenated together - // across sets. - assert (!isPreview) : - "Preview result set should never be after a final set."; - } - return event; - } - - /* - * Get the next event in the current result set. Return null - * if the end is reached. - */ - abstract Event getNextEventInCurrentSet() throws IOException; - - /* - * Return false if the end is reached. - */ - final boolean resetIteratorToNextSet() throws IOException { - - // Get to the beginning of the next set in the stream - // skipping remaining event(s) if any in the current set. 
- boolean hasMoreResults = advanceStreamToNextSet(); - - // Reset the iterator so that it would either fetch a new - // element for the next iteration or stop. - resetIteration(hasMoreResults); - - return hasMoreResults; - } - - /* - * Return false if the end is reached. - */ - boolean advanceStreamToNextSet() throws IOException { - // Indicate that no more sets are available - // Subclasses can override this method to support - // MultiResultsReader. - return false; - } - - /* - * This method is used by constructors of result readers to do - * the following for single reader: - * 1. Obtain the preview flag and the field list. - * 2. Skip any previews for export. - */ - final void finishInitialization() throws IOException { - if (isInMultiReader) - return; - - while (true) { - // Stop if no more set is available - if (!advanceStreamToNextSet()) { - // Terminating the iteration. - // This avoids future callings into the underlying reader - // to get events, which may result in exceptions. - resetIteration(false); - break; - } - - // No skipping of result sets if the stream - // is not from an export endpoint. - if (!isExportStream) - break; - - // Skipping ends at any file results. - if (!isPreview) - break; - } - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Iterator; + +/** + * The {@code ResultsReader} class is a base class for the streaming readers + * for Splunk search results. This class should not be used to retrieve preview + * results for an export search. + */ +public abstract class ResultsReader + extends StreamIterableBase<Event> + implements SearchResults { + protected final InputStream inputStream; + // Default should be false which will result in no result set skipping. + boolean isPreview; + boolean isExportStream; + private boolean isInMultiReader; + + ResultsReader(InputStream inputStream, boolean isInMultiReader) + throws IOException { + this.inputStream = inputStream; + isExportStream = inputStream instanceof ExportResultsStream; + this.isInMultiReader = isInMultiReader; + } + + /** + * Closes the reader and returns resources. + * + * @throws IOException On IO exception. + */ + public void close() throws IOException { + inputStream.close(); + } + + /** + * Returns the next event in the event stream. + * + * @return The map of key-value pairs for an event. + * The format of multi-item values is implementation-specific. + * We recommend using the methods from the + * {@link Event} class to interpret multi-item values. + * @throws IOException On IO exception. + */ + final public Event getNextEvent() throws IOException { + return getNextElement(); + } + + /** + * Returns an iterator over the events from this reader. + * @return an Iterator. + */ + @Override + public final Iterator<Event> iterator() { + return super.iterator(); + } + + /** + * Returns the next event while moving to the next set + * automatically when needed, such as concatenating final results + * across multiple sets. + * + * @return null {@code null} if the end is reached. + * @throws IOException On IO exception. 
+ */ + final Event getNextElement() throws IOException { + Event event; + while (true) { + event = getNextEventInCurrentSet(); + + // If we actually managed to get an event, then we break and return it + if (event != null) + break; + + // We don't concatenate across previews across sets, since each set + // might be a snapshot at a given time or a summary result with + // partial data from a reporting search + // (for example "count by host"). So if this is a preview, + // break. Null return indicating the end of the set. + if (isPreview) + break; + + // If we did not advance to next set, i.e. the end of stream is + // reached, break. Null return indicating the end of the set. + if (!advanceStreamToNextSet()) + break; + + // We have advanced to the next set. isPreview is for that set. + // It should not be a preview. Splunk should never return a preview + // after final results which we might have concatenated together + // across sets. + assert (!isPreview) : + "Preview result set should never be after a final set."; + } + return event; + } + + /* + * Get the next event in the current result set. Return null + * if the end is reached. + */ + abstract Event getNextEventInCurrentSet() throws IOException; + + /* + * Return false if the end is reached. + */ + final boolean resetIteratorToNextSet() throws IOException { + + // Get to the beginning of the next set in the stream + // skipping remaining event(s) if any in the current set. + boolean hasMoreResults = advanceStreamToNextSet(); + + // Reset the iterator so that it would either fetch a new + // element for the next iteration or stop. + resetIteration(hasMoreResults); + + return hasMoreResults; + } + + /* + * Return false if the end is reached. + */ + boolean advanceStreamToNextSet() throws IOException { + // Indicate that no more sets are available + // Subclasses can override this method to support + // MultiResultsReader. 
+ return false; + } + + /* + * This method is used by constructors of result readers to do + * the following for single reader: + * 1. Obtain the preview flag and the field list. + * 2. Skip any previews for export. + */ + final void finishInitialization() throws IOException { + if (isInMultiReader) + return; + + while (true) { + // Stop if no more set is available + if (!advanceStreamToNextSet()) { + // Terminating the iteration. + // This avoids future callings into the underlying reader + // to get events, which may result in exceptions. + resetIteration(false); + break; + } + + // No skipping of result sets if the stream + // is not from an export endpoint. + if (!isExportStream) + break; + + // Skipping ends at any file results. + if (!isPreview) + break; + } + } +} diff --git a/splunk/src/main/java/com/splunk/ResultsReaderCsv.java b/splunk/src/main/java/com/splunk/ResultsReaderCsv.java index 64b3dec8..668de189 100644 --- a/splunk/src/main/java/com/splunk/ResultsReaderCsv.java +++ b/splunk/src/main/java/com/splunk/ResultsReaderCsv.java @@ -1,116 +1,116 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import au.com.bytecode.opencsv.CSVReader; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -/** - * The {@code ResultsReaderCsv} class represents a streaming CSV reader for - * Splunk search results. This class requires the opencsv-2.3.jar file in your - * build path. - */ -public class ResultsReaderCsv extends ResultsReader { - - private CSVReader csvReader = null; - private List<String> keys; - - /** - * Class constructor. - * - * Constructs a streaming CSV reader for the event stream. You should only - * attempt to parse a CSV stream with this reader. If you attempt to parse - * a different type of stream, unpredictable results may occur. - * - * @param inputStream The CSV stream to parse. - * @throws IOException The IOException instance - */ - public ResultsReaderCsv(InputStream inputStream) throws IOException { - super(inputStream, false); - if (isExportStream) - throw new UnsupportedOperationException( - "A stream from an export endpoint is not supported " + - "by a CSV result reader. Use XML or JSON search output "+ - "format and matching reader instead." - ); - csvReader = new CSVReader(new InputStreamReader(inputStream, "UTF-8")); - // initial line contains the keyArray, except for oneshot -- which - // contains a blank line, and then the key list. - String[] keyArray = csvReader.readNext(); - if (keyArray.length == 1 && keyArray[0].trim().equals("")) { - keyArray = csvReader.readNext(); - } - keys = Arrays.asList(keyArray); - } - - /** {@inheritDoc} */ - @Override public void close() throws IOException { - super.close(); - if (csvReader != null) - csvReader.close(); - csvReader = null; - } - - /** - * This method is not supported. - * @return Not applicable. 
- */ - public boolean isPreview(){ - throw new UnsupportedOperationException( - "isPreview() is not supported by this subclass."); - } - - /** {@inheritDoc} */ - public Collection<String> getFields(){ - return keys; - } - - /* - * Multiple result sets are not supported by this reader. - * This function reads the entire stream. - * An application won't reach here with a stream from - * an /export endpoint. The constructor will throw an error in that case. - */ - @Override Event getNextEventInCurrentSet() throws IOException { - Event returnData = null; - String[] line; - - if ((line = csvReader.readNext()) != null) { - if (line.length == 1 && line[0].equals("")) { - line = csvReader.readNext(); - if (line == null) { - return returnData; - } - } - - returnData = new Event(); - int count = 0; - for (String key : keys) { - String delimitedValues = line[count++]; - returnData.putSingleOrDelimited(key, delimitedValues); - } - } - - return returnData; - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import au.com.bytecode.opencsv.CSVReader; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +/** + * The {@code ResultsReaderCsv} class represents a streaming CSV reader for + * Splunk search results. This class requires the opencsv-2.3.jar file in your + * build path. 
+ */ +public class ResultsReaderCsv extends ResultsReader { + + private CSVReader csvReader = null; + private List<String> keys; + + /** + * Class constructor. + * + * Constructs a streaming CSV reader for the event stream. You should only + * attempt to parse a CSV stream with this reader. If you attempt to parse + * a different type of stream, unpredictable results may occur. + * + * @param inputStream The CSV stream to parse. + * @throws IOException The IOException instance + */ + public ResultsReaderCsv(InputStream inputStream) throws IOException { + super(inputStream, false); + if (isExportStream) + throw new UnsupportedOperationException( + "A stream from an export endpoint is not supported " + + "by a CSV result reader. Use XML or JSON search output "+ + "format and matching reader instead." + ); + csvReader = new CSVReader(new InputStreamReader(inputStream, "UTF-8")); + // initial line contains the keyArray, except for oneshot -- which + // contains a blank line, and then the key list. + String[] keyArray = csvReader.readNext(); + if (keyArray.length == 1 && keyArray[0].trim().equals("")) { + keyArray = csvReader.readNext(); + } + keys = Arrays.asList(keyArray); + } + + /** {@inheritDoc} */ + @Override public void close() throws IOException { + super.close(); + if (csvReader != null) + csvReader.close(); + csvReader = null; + } + + /** + * This method is not supported. + * @return Not applicable. + */ + public boolean isPreview(){ + throw new UnsupportedOperationException( + "isPreview() is not supported by this subclass."); + } + + /** {@inheritDoc} */ + public Collection<String> getFields(){ + return keys; + } + + /* + * Multiple result sets are not supported by this reader. + * This function reads the entire stream. + * An application won't reach here with a stream from + * an /export endpoint. The constructor will throw an error in that case. 
+ */ + @Override Event getNextEventInCurrentSet() throws IOException { + Event returnData = null; + String[] line; + + if ((line = csvReader.readNext()) != null) { + if (line.length == 1 && line[0].equals("")) { + line = csvReader.readNext(); + if (line == null) { + return returnData; + } + } + + returnData = new Event(); + int count = 0; + for (String key : keys) { + String delimitedValues = line[count++]; + returnData.putSingleOrDelimited(key, delimitedValues); + } + } + + return returnData; + } +} diff --git a/splunk/src/main/java/com/splunk/ResultsReaderJson.java b/splunk/src/main/java/com/splunk/ResultsReaderJson.java index fbbc9b83..e1760b21 100644 --- a/splunk/src/main/java/com/splunk/ResultsReaderJson.java +++ b/splunk/src/main/java/com/splunk/ResultsReaderJson.java @@ -1,356 +1,356 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonToken; - -import java.io.EOFException; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -/** - * The {@code ResultsReaderJson} class represents a streaming JSON reader for - * Splunk search results. This class requires the gson-2.1.jar file in your - * build path. 
If you want to access the preview events, use the - * {@link MultiResultsReaderJson} class. - */ -public class ResultsReaderJson extends ResultsReader { - private JsonReader jsonReader; - // Helper object that will only be constructed if the reader is handling - // json format used by export. - private ExportHelper exportHelper; - // Whether the 'preview' flag is read - private boolean previewFlagRead; - - /** - * Class constructor. - * - * Constructs a streaming JSON reader for the event stream. You should only - * attempt to parse a JSON stream with this reader. If you attempt to parse - * a different type of stream, unpredictable results may occur. - * - * @param inputStream The JSON stream to parse. - * @throws IOException The IOException instance - */ - public ResultsReaderJson(InputStream inputStream) throws IOException { - this(inputStream, false); - } - - ResultsReaderJson(InputStream inputStream, boolean isInMultiReader) - throws IOException { - super(inputStream, isInMultiReader); - jsonReader = new JsonReader(new InputStreamReader(inputStream, "UTF-8")); - // if stream is empty, return a null reader. - jsonReader.setLenient(true); - if (isExportStream || isInMultiReader) - exportHelper = new ExportHelper(); - finishInitialization(); - } - - // Advance in the json stream, reading meta data if available, and - // get ready for readEvent method. - // Return false if end of stream is encountered. - boolean advanceIntoNextSetBeforeEvent() throws IOException { - // jsonReader will be set to null once the end is reached. - if (jsonReader == null) - return false; - - // In Splunk 5.0 from the export endpoint, - // each result is in its own top level object. - // In Splunk 5.0 not from the export endpoint, the results are - // an array at that object's key "results". - // In Splunk 4.3, the - // array was the top level returned. 
So if we find an object - // at top level, we step into it until we find the right key, - // then leave it in that state to iterate over. - try { - // Json single-reader depends on 'isExport' flag to function. - // It does not support a stream from a file saved from - // a stream from an export endpoint. - // Json multi-reader assumes export format thus does not support - // a stream from none export endpoints. - if (exportHelper != null) { - if (jsonReader.peek() == JsonToken.BEGIN_ARRAY) - throw new UnsupportedOperationException( - "A stream from an export endpoint of " + - "a Splunk 4.x server in the JSON output format " + - "is not supported by this class. " + - "Use the XML search output format, " + - "and an XML result reader instead."); - /* - * We're on a stream from an export endpoint - * Below is an example of an input stream. - * {"preview":true,"offset":0,"lastrow":true,"result":{"host":"Andy-PC","count":"62"}} - * {"preview":true,"offset":0,"result":{"host":"Andy-PC","count":"1682"}} - */ - // Read into first result object of the next set. - while (true) { - boolean endPassed = exportHelper.lastRow; - exportHelper.skipRestOfRow(); - if (!exportHelper.readIntoRow()) - return false; - if (endPassed) - break; - } - return true; - } - // Single-reader not from an export endpoint - if (jsonReader.peek() == JsonToken.BEGIN_OBJECT) { - /* - * We're on Splunk 5 with a single-reader not from - * an export endpoint - * Below is an example of an input stream. 
- * {"preview":false,"init_offset":0,"messages":[{"type":"DEBUG","text":"base lispy: [ AND index::_internal ]"},{"type":"DEBUG","text":"search context: user=\"admin\", app=\"search\", bs-pathname=\"/Users/fross/splunks/splunk-5.0/etc\""}],"results":[{"sum(kb)":"14372242.758775","series":"twitter"},{"sum(kb)":"267802.333926","series":"splunkd"},{"sum(kb)":"5979.036338","series":"splunkd_access"}]} - */ - jsonReader.beginObject(); - String key; - while (true) { - key = jsonReader.nextName(); - if (key.equals("preview")) - readPreviewFlag(); - else if (key.equals("results")) { - jsonReader.beginArray(); - return true; - } else { - skipEntity(); - } - } - } else { // We're on Splunk 4.x, and we just need to start the array. - /* - * Below is an example of an input stream - * [ - * { - * "sum(kb)":"14372242.758775", - * "series":"twitter" - * }, - * { - * "sum(kb)":"267802.333926", - * "series":"splunkd" - * }, - * { - * "sum(kb)":"5979.036338", - * "series":"splunkd_access" - * } - * ] - */ - jsonReader.beginArray(); - return true; - } - } catch (EOFException e) { - return false; - } - } - - private void readPreviewFlag() throws IOException { - isPreview = jsonReader.nextBoolean(); - previewFlagRead = true; - } - - /** - * Skip the next value, whether it is atomic or compound, in the JSON - * stream. 
- */ - private void skipEntity() throws IOException { - if (jsonReader.peek() == JsonToken.STRING) { - jsonReader.nextString(); - } else if (jsonReader.peek() == JsonToken.BOOLEAN) { - jsonReader.nextBoolean(); - } else if (jsonReader.peek() == JsonToken.NUMBER) { - jsonReader.nextDouble(); - } else if (jsonReader.peek() == JsonToken.NULL) { - jsonReader.nextNull(); - } else if (jsonReader.peek() == JsonToken.NAME) { - jsonReader.nextName(); - } else if (jsonReader.peek() == JsonToken.BEGIN_ARRAY) { - jsonReader.beginArray(); - while (jsonReader.peek() != JsonToken.END_ARRAY) { - skipEntity(); - } - jsonReader.endArray(); - } else if (jsonReader.peek() == JsonToken.BEGIN_OBJECT) { - jsonReader.beginObject(); - while (jsonReader.peek() != JsonToken.END_OBJECT) { - skipEntity(); - } - jsonReader.endObject(); - } - } - - /** {@inheritDoc} */ - @Override public void close() throws IOException { - super.close(); - if (jsonReader != null) - jsonReader.close(); - jsonReader = null; - } - - /** {@inheritDoc} */ - public boolean isPreview(){ - if (!previewFlagRead) - throw new UnsupportedOperationException( - "isPreview() is not supported " + - "with a stream from a Splunk 4.x server by this class. " + - "Use the XML format and an XML result reader instead."); - return isPreview; - } - - /** - * This method is not supported. - * @return Not applicable. - */ - public Collection<String> getFields(){ - throw new UnsupportedOperationException( - "getFields() is not supported by this subclass."); - } - - @Override Event getNextEventInCurrentSet() throws IOException { - if (exportHelper != null) { - // If the last row has been passed and moveToNextStreamPosition - // has not been called, end the current set. 
- if (exportHelper.lastRow && !exportHelper.inRow ) { - return null; - } - exportHelper.readIntoRow(); - } - - Event returnData = readEvent(); - - if (exportHelper != null) { - exportHelper.skipRestOfRow(); - return returnData; - } - // Single reader not from export - if (returnData == null) - close(); - return returnData; - } - - private Event readEvent() throws IOException { - Event returnData = null; - String name = null; - List<String> values = new ArrayList<>(); - - if (jsonReader == null) - return null; - - // Events are almost flat, so no need for a true general parser - // solution. But the Gson parser is a little unintuitive here. Nested - // objects, have their own relative notion of hasNext. This - // means that for every object or array start, hasNext() returns false - // and one must consume the closing (END) object to get back to the - // previous object. - while (jsonReader.hasNext()) { - if (returnData == null) { - returnData = new Event(); - } - if (jsonReader.peek() == JsonToken.BEGIN_OBJECT) { - jsonReader.beginObject(); - } - if (jsonReader.peek() == JsonToken.BEGIN_ARRAY) { - jsonReader.beginArray(); - // The Gson parser is a little unintuitive here. Nested objects, - // have their own relative notion of hasNext; when hasNext() - // is done, it is only for this array. 
- while (jsonReader.hasNext()) { - JsonToken jsonToken2 = jsonReader.peek(); - if (jsonToken2 == JsonToken.STRING) { - values.add(jsonReader.nextString()); - } - } - jsonReader.endArray(); - - String[] valuesArray = - values.toArray(new String[values.size()]); - returnData.putArray(name, valuesArray); - - values.clear(); - } - if (jsonReader.peek() == JsonToken.NAME) { - name = jsonReader.nextName(); - } - if (jsonReader.peek() == JsonToken.STRING) { - String delimitedValues = jsonReader.nextString(); - returnData.putSingleOrDelimited(name, delimitedValues); - } - if (jsonReader.peek() == JsonToken.END_OBJECT) { - jsonReader.endObject(); - break; - } - if (jsonReader.peek() == JsonToken.END_ARRAY) { - jsonReader.endArray(); - } - } - return returnData; - } - - @Override boolean advanceStreamToNextSet() throws IOException{ - return advanceIntoNextSetBeforeEvent(); - } - - /** - * Contains code only used for streams from the export endpoint. - */ - private class ExportHelper { - // Initial value must be true so that - // the first row is treated as the start of a new set. - boolean lastRow = true; - boolean inRow; - - ExportHelper() { } - - // Return false if end of stream is encountered. - private boolean readIntoRow() throws IOException { - if (inRow) - return true; - if (jsonReader.peek() == JsonToken.END_DOCUMENT) - return false; - inRow = true; - jsonReader.beginObject(); - // lastrow name and value pair does not appear if the row - // is not the last in the set. 
- lastRow = false; - while (jsonReader.hasNext()) { - String key = jsonReader.nextName(); - if (key.equals("preview")) { - readPreviewFlag(); - } else if (key.equals("lastrow")) { - lastRow = jsonReader.nextBoolean(); - } else if (key.equals("result")) { - return true; - } else { - skipEntity(); - } - } - return false; - } - - private void skipRestOfRow() throws IOException { - if (!inRow) - return; - inRow = false; - while (jsonReader.peek() != JsonToken.END_OBJECT) { - skipEntity(); - } - jsonReader.endObject(); - } - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonToken; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * The {@code ResultsReaderJson} class represents a streaming JSON reader for + * Splunk search results. This class requires the gson-2.1.jar file in your + * build path. If you want to access the preview events, use the + * {@link MultiResultsReaderJson} class. + */ +public class ResultsReaderJson extends ResultsReader { + private JsonReader jsonReader; + // Helper object that will only be constructed if the reader is handling + // json format used by export. 
+ private ExportHelper exportHelper; + // Whether the 'preview' flag is read + private boolean previewFlagRead; + + /** + * Class constructor. + * + * Constructs a streaming JSON reader for the event stream. You should only + * attempt to parse a JSON stream with this reader. If you attempt to parse + * a different type of stream, unpredictable results may occur. + * + * @param inputStream The JSON stream to parse. + * @throws IOException The IOException instance + */ + public ResultsReaderJson(InputStream inputStream) throws IOException { + this(inputStream, false); + } + + ResultsReaderJson(InputStream inputStream, boolean isInMultiReader) + throws IOException { + super(inputStream, isInMultiReader); + jsonReader = new JsonReader(new InputStreamReader(inputStream, "UTF-8")); + // if stream is empty, return a null reader. + jsonReader.setLenient(true); + if (isExportStream || isInMultiReader) + exportHelper = new ExportHelper(); + finishInitialization(); + } + + // Advance in the json stream, reading meta data if available, and + // get ready for readEvent method. + // Return false if end of stream is encountered. + boolean advanceIntoNextSetBeforeEvent() throws IOException { + // jsonReader will be set to null once the end is reached. + if (jsonReader == null) + return false; + + // In Splunk 5.0 from the export endpoint, + // each result is in its own top level object. + // In Splunk 5.0 not from the export endpoint, the results are + // an array at that object's key "results". + // In Splunk 4.3, the + // array was the top level returned. So if we find an object + // at top level, we step into it until we find the right key, + // then leave it in that state to iterate over. + try { + // Json single-reader depends on 'isExport' flag to function. + // It does not support a stream from a file saved from + // a stream from an export endpoint. + // Json multi-reader assumes export format thus does not support + // a stream from none export endpoints. 
+ if (exportHelper != null) { + if (jsonReader.peek() == JsonToken.BEGIN_ARRAY) + throw new UnsupportedOperationException( + "A stream from an export endpoint of " + + "a Splunk 4.x server in the JSON output format " + + "is not supported by this class. " + + "Use the XML search output format, " + + "and an XML result reader instead."); + /* + * We're on a stream from an export endpoint + * Below is an example of an input stream. + * {"preview":true,"offset":0,"lastrow":true,"result":{"host":"Andy-PC","count":"62"}} + * {"preview":true,"offset":0,"result":{"host":"Andy-PC","count":"1682"}} + */ + // Read into first result object of the next set. + while (true) { + boolean endPassed = exportHelper.lastRow; + exportHelper.skipRestOfRow(); + if (!exportHelper.readIntoRow()) + return false; + if (endPassed) + break; + } + return true; + } + // Single-reader not from an export endpoint + if (jsonReader.peek() == JsonToken.BEGIN_OBJECT) { + /* + * We're on Splunk 5 with a single-reader not from + * an export endpoint + * Below is an example of an input stream. + * {"preview":false,"init_offset":0,"messages":[{"type":"DEBUG","text":"base lispy: [ AND index::_internal ]"},{"type":"DEBUG","text":"search context: user=\"admin\", app=\"search\", bs-pathname=\"/Users/fross/splunks/splunk-5.0/etc\""}],"results":[{"sum(kb)":"14372242.758775","series":"twitter"},{"sum(kb)":"267802.333926","series":"splunkd"},{"sum(kb)":"5979.036338","series":"splunkd_access"}]} + */ + jsonReader.beginObject(); + String key; + while (true) { + key = jsonReader.nextName(); + if (key.equals("preview")) + readPreviewFlag(); + else if (key.equals("results")) { + jsonReader.beginArray(); + return true; + } else { + skipEntity(); + } + } + } else { // We're on Splunk 4.x, and we just need to start the array. 
+ /* + * Below is an example of an input stream + * [ + * { + * "sum(kb)":"14372242.758775", + * "series":"twitter" + * }, + * { + * "sum(kb)":"267802.333926", + * "series":"splunkd" + * }, + * { + * "sum(kb)":"5979.036338", + * "series":"splunkd_access" + * } + * ] + */ + jsonReader.beginArray(); + return true; + } + } catch (EOFException e) { + return false; + } + } + + private void readPreviewFlag() throws IOException { + isPreview = jsonReader.nextBoolean(); + previewFlagRead = true; + } + + /** + * Skip the next value, whether it is atomic or compound, in the JSON + * stream. + */ + private void skipEntity() throws IOException { + if (jsonReader.peek() == JsonToken.STRING) { + jsonReader.nextString(); + } else if (jsonReader.peek() == JsonToken.BOOLEAN) { + jsonReader.nextBoolean(); + } else if (jsonReader.peek() == JsonToken.NUMBER) { + jsonReader.nextDouble(); + } else if (jsonReader.peek() == JsonToken.NULL) { + jsonReader.nextNull(); + } else if (jsonReader.peek() == JsonToken.NAME) { + jsonReader.nextName(); + } else if (jsonReader.peek() == JsonToken.BEGIN_ARRAY) { + jsonReader.beginArray(); + while (jsonReader.peek() != JsonToken.END_ARRAY) { + skipEntity(); + } + jsonReader.endArray(); + } else if (jsonReader.peek() == JsonToken.BEGIN_OBJECT) { + jsonReader.beginObject(); + while (jsonReader.peek() != JsonToken.END_OBJECT) { + skipEntity(); + } + jsonReader.endObject(); + } + } + + /** {@inheritDoc} */ + @Override public void close() throws IOException { + super.close(); + if (jsonReader != null) + jsonReader.close(); + jsonReader = null; + } + + /** {@inheritDoc} */ + public boolean isPreview(){ + if (!previewFlagRead) + throw new UnsupportedOperationException( + "isPreview() is not supported " + + "with a stream from a Splunk 4.x server by this class. " + + "Use the XML format and an XML result reader instead."); + return isPreview; + } + + /** + * This method is not supported. + * @return Not applicable. 
+ */ + public Collection<String> getFields(){ + throw new UnsupportedOperationException( + "getFields() is not supported by this subclass."); + } + + @Override Event getNextEventInCurrentSet() throws IOException { + if (exportHelper != null) { + // If the last row has been passed and moveToNextStreamPosition + // has not been called, end the current set. + if (exportHelper.lastRow && !exportHelper.inRow ) { + return null; + } + exportHelper.readIntoRow(); + } + + Event returnData = readEvent(); + + if (exportHelper != null) { + exportHelper.skipRestOfRow(); + return returnData; + } + // Single reader not from export + if (returnData == null) + close(); + return returnData; + } + + private Event readEvent() throws IOException { + Event returnData = null; + String name = null; + List<String> values = new ArrayList<>(); + + if (jsonReader == null) + return null; + + // Events are almost flat, so no need for a true general parser + // solution. But the Gson parser is a little unintuitive here. Nested + // objects, have their own relative notion of hasNext. This + // means that for every object or array start, hasNext() returns false + // and one must consume the closing (END) object to get back to the + // previous object. + while (jsonReader.hasNext()) { + if (returnData == null) { + returnData = new Event(); + } + if (jsonReader.peek() == JsonToken.BEGIN_OBJECT) { + jsonReader.beginObject(); + } + if (jsonReader.peek() == JsonToken.BEGIN_ARRAY) { + jsonReader.beginArray(); + // The Gson parser is a little unintuitive here. Nested objects, + // have their own relative notion of hasNext; when hasNext() + // is done, it is only for this array. 
+ while (jsonReader.hasNext()) { + JsonToken jsonToken2 = jsonReader.peek(); + if (jsonToken2 == JsonToken.STRING) { + values.add(jsonReader.nextString()); + } + } + jsonReader.endArray(); + + String[] valuesArray = + values.toArray(new String[values.size()]); + returnData.putArray(name, valuesArray); + + values.clear(); + } + if (jsonReader.peek() == JsonToken.NAME) { + name = jsonReader.nextName(); + } + if (jsonReader.peek() == JsonToken.STRING) { + String delimitedValues = jsonReader.nextString(); + returnData.putSingleOrDelimited(name, delimitedValues); + } + if (jsonReader.peek() == JsonToken.END_OBJECT) { + jsonReader.endObject(); + break; + } + if (jsonReader.peek() == JsonToken.END_ARRAY) { + jsonReader.endArray(); + } + } + return returnData; + } + + @Override boolean advanceStreamToNextSet() throws IOException{ + return advanceIntoNextSetBeforeEvent(); + } + + /** + * Contains code only used for streams from the export endpoint. + */ + private class ExportHelper { + // Initial value must be true so that + // the first row is treated as the start of a new set. + boolean lastRow = true; + boolean inRow; + + ExportHelper() { } + + // Return false if end of stream is encountered. + private boolean readIntoRow() throws IOException { + if (inRow) + return true; + if (jsonReader.peek() == JsonToken.END_DOCUMENT) + return false; + inRow = true; + jsonReader.beginObject(); + // lastrow name and value pair does not appear if the row + // is not the last in the set. 
+ lastRow = false; + while (jsonReader.hasNext()) { + String key = jsonReader.nextName(); + if (key.equals("preview")) { + readPreviewFlag(); + } else if (key.equals("lastrow")) { + lastRow = jsonReader.nextBoolean(); + } else if (key.equals("result")) { + return true; + } else { + skipEntity(); + } + } + return false; + } + + private void skipRestOfRow() throws IOException { + if (!inRow) + return; + inRow = false; + while (jsonReader.peek() != JsonToken.END_OBJECT) { + skipEntity(); + } + jsonReader.endObject(); + } + } +} diff --git a/splunk/src/main/java/com/splunk/ResultsReaderXml.java b/splunk/src/main/java/com/splunk/ResultsReaderXml.java index ce7c8d0a..dd22d3bf 100644 --- a/splunk/src/main/java/com/splunk/ResultsReaderXml.java +++ b/splunk/src/main/java/com/splunk/ResultsReaderXml.java @@ -1,427 +1,427 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import javax.xml.namespace.QName; -import javax.xml.stream.*; -import javax.xml.stream.events.Attribute; -import javax.xml.stream.events.StartElement; -import javax.xml.stream.events.XMLEvent; -import java.io.*; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; - -/** - * The {@code ResultsReaderXml} class represents a streaming XML reader for - * Splunk search results. 
When a stream from an export search is passed to this - * reader, it skips any preview events in the stream. If you want to access the - * preview events, use the {@link MultiResultsReaderXml} class. - */ -public class ResultsReaderXml - extends ResultsReader { - - private XMLEventReader xmlReader = null; - private ArrayList<String> fields = new ArrayList<>(); - private PushbackInputStream pushbackInputStream; - - /** - * Class constructor. - * - * Constructs a streaming XML reader for the event stream. You should only - * attempt to parse an XML stream with this reader. If you attempt to parse - * a different type of stream, unpredictable results may occur. - * <br> - * The pushback reader modifies export streams to generate non-strict XML - * at the beginning of the stream. The streaming reader ignores preview - * data, and only extracts finalized data. - * - * @param inputStream The XML stream to parse. - * @throws IOException The IOException instance - */ - public ResultsReaderXml(InputStream inputStream) throws IOException { - this(inputStream, false); - } - - ResultsReaderXml( - InputStream inputStream, - boolean isInMultiReader) - throws IOException { - super(inputStream, isInMultiReader); - - // We need to do read-ahead, so we have to use a PushbackInputStream for everything - // in this class. - this.pushbackInputStream = new PushbackInputStream(inputStream); - XMLInputFactory inputFactory = XMLInputFactory.newInstance(); - - int ch = this.pushbackInputStream.read(); - if (ch == -1) { - return; // Stream is empty. 
- } else { - ((PushbackInputStream)this.pushbackInputStream).unread(ch); - } - - inputFactory.setProperty(XMLInputFactory.IS_COALESCING, true); - try { - InputStream filteredStream = new InsertRootElementFilterInputStream(this.pushbackInputStream); - xmlReader = inputFactory.createXMLEventReader(filteredStream); - finishInitialization(); - } catch (XMLStreamException e) { - throw new RuntimeException(e); - } - } - - /** {@inheritDoc} */ - @Override public void close() throws IOException { - if (xmlReader != null) { - try { - xmlReader.close(); - } catch (XMLStreamException e) { - throw new RuntimeException(e); - } - } - xmlReader = null; - - super.close(); - } - - /** {@inheritDoc} */ - public boolean isPreview() { - return isPreview; - } - - /** {@inheritDoc} */ - public Collection<String> getFields() { - return fields; - } - - @Override Event getNextEventInCurrentSet() throws IOException { - // Handle empty stream or other cases where xmlReader is - // not constructed. - if (xmlReader == null) { - return null; - } - try { - Event event = null; - XMLEvent xmlEvent = readToStartOfElementAtSameLevelWithName("result"); - if (xmlEvent != null) { - event = getResultKVPairs(); - } - return event; - } catch (XMLStreamException e) { - throw new RuntimeException(e); - } - } - - // Reads the preview flag and field name list, and position in the middle of - // the result element for reading actual results later. - // Return value indicates whether the next 'results' element is found. - boolean readIntoNextResultsElement() - throws XMLStreamException, IOException { - XMLEvent xmlEvent = readToStartOfElementWithName("results"); - if (xmlEvent == null) { - return false; - } - - if (xmlEvent != null && - xmlEvent.asStartElement() - .getAttributeByName(QName.valueOf("preview")) - .getValue() - .equals("0") ){ - isPreview = false; - } else { - isPreview = true; - } - - // Read <meta> element. 
- final String meta = "meta"; - if (readToStartOfElementAtSameLevelWithName(meta) != null) { - readFieldOrderElement(); - readToEndElementWithName(meta); - } - return true; - } - - XMLEvent readToStartOfElementWithName(String elementName) - throws XMLStreamException { - while (xmlReader.hasNext()) { - XMLEvent xmlEvent = xmlReader.nextEvent(); - int eType = xmlEvent.getEventType(); - if (eType != XMLStreamConstants.START_ELEMENT){ - continue; - } - - StartElement startElement = xmlEvent.asStartElement(); - if(startElement - .getName() - .getLocalPart() - .equals(elementName)){ - return xmlEvent; - } - } - return null; - } - - void readToEndElementWithName(String elementName) throws XMLStreamException { - XMLEvent xmlEvent; - int eType; - - while (xmlReader.hasNext()) { - xmlEvent = xmlReader.nextEvent(); - eType = xmlEvent.getEventType(); - switch (eType) { - case XMLStreamConstants.START_ELEMENT: - break; - case XMLStreamConstants.END_ELEMENT: - if (xmlEvent.asEndElement() - .getName() - .getLocalPart() - .equals(elementName)) { - return; - } - break; - default: - break; - } - } - - throw new RuntimeException("End tag of " + elementName + " not found."); - } - - /** - * Reads to the next specified start element at the same level. The reader - * stops past that element if it is found. Otherwise, the reader stops - * before the end element of the current level. - * - * @param elementName The name of the start element. - * @return The start element, or {@code null} if not found. 
- * @throws XMLStreamException - */ - XMLEvent readToStartOfElementAtSameLevelWithName(String elementName) - throws XMLStreamException { - XMLEvent xmlEvent; - int eType; - int level = 0; - while (xmlReader.hasNext()) { - xmlEvent = xmlReader.peek(); - eType = xmlEvent.getEventType(); - switch (eType) { - case XMLStreamConstants.START_ELEMENT: - if (level++ > 0){ - break; - } - StartElement startElement = xmlEvent.asStartElement(); - if (startElement - .getName() - .getLocalPart() - .equals(elementName)) { - xmlReader.nextEvent(); - return xmlEvent; - } - break; - case XMLStreamConstants.END_ELEMENT: - if (level-- == 0) { - return null; - } - break; - default: - break; - } - xmlReader.nextEvent(); - } - - throw new RuntimeException("Parent end element not found:" + elementName); - } - - // At the end, move off the end element of 'fieldOrder' - private void readFieldOrderElement() - throws IOException, XMLStreamException { - XMLEvent xmlEvent; - int eType; - int level = 0; - - if (readToStartOfElementAtSameLevelWithName("fieldOrder") == null) - return; - - while (xmlReader.hasNext()) { - xmlEvent = xmlReader.nextEvent(); - eType = xmlEvent.getEventType(); - switch (eType) { - case XMLStreamConstants.START_ELEMENT: - level++; - break; - case XMLStreamConstants.END_ELEMENT: - if (xmlEvent.asEndElement() - .getName() - .getLocalPart() - .equals("fieldOrder")) { - return; - } - level--; - break; - case XMLStreamConstants.CHARACTERS: - if (level == 1) { - fields.add(xmlEvent.asCharacters().getData()); - } - break; - default: - break; - } - } - - throw new RuntimeException("End tag of fieldOrder not found."); - } - - // At the end, move off the end tag of 'result' - private Event getResultKVPairs() - throws IOException, XMLStreamException { - - Event returnData = new Event(); - XMLEvent xmlEvent; - int eType; - String key = null; - List<String> values = new ArrayList<>(); - int level = 0; - - // Event results are flat, so extract k/v pairs based on XML indentation - // 
level throwing away the uninteresting non-data. - - while (xmlReader.hasNext()) { - xmlEvent = xmlReader.nextEvent(); - eType = xmlEvent.getEventType(); - switch (eType) { - case XMLStreamConstants.START_ELEMENT: - final StartElement startElement = xmlEvent.asStartElement(); - @SuppressWarnings("unchecked") - Iterator<Attribute> attrIttr = - startElement.getAttributes(); - if (level == 0) { - if (attrIttr.hasNext()) - key = attrIttr.next().getValue(); - } else if (level == 1 && - key.equals("_raw") && - startElement - .getName() - .getLocalPart() - .equals("v")) { - StringBuilder asString = new StringBuilder(); - StringWriter asXml = new StringWriter(); - readSubtree(startElement, asString, asXml); - values.add(asString.toString()); - returnData.putSegmentedRaw(asXml.toString()); - level--; - } - level++; - break; - case XMLStreamConstants.END_ELEMENT: - if (xmlEvent.asEndElement() - .getName() - .getLocalPart() - .equals("result")) - return returnData; - - if (--level == 0) { - String[] valuesArray = - values.toArray(new String[values.size()]); - returnData.putArray(key, valuesArray); - - key = null; - values.clear(); - } - break; - case XMLStreamConstants.CHARACTERS: - if (level > 1) { - values.add(xmlEvent.asCharacters().getData()); - } - break; - default: - break; - } - } - - throw new RuntimeException("End tag of 'result' not found."); - } - - @Override boolean advanceStreamToNextSet() throws IOException { - // Handle empty stream or other cases where xmlReader is - // not constructed. 
- if (xmlReader == null) { - return false; - } - try { - return readIntoNextResultsElement(); - } catch (XMLStreamException e) { - throw new RuntimeException(e); - } catch (NullPointerException e) { - // Invalid xml (<doc> and multiple <results> may results in - // this exception in the xml reader with JDK 1.7 at: - // com.sun.org.apache.xerces.internal.impl.XMLEntityScanner.load(XMLEntityScanner.java:1748) - return false; - } catch (ArrayIndexOutOfBoundsException e) { - // Invalid xml (<doc> and multiple <results> may results in - // this exception in the xml reader with JDK 1.6 at: - // com.sun.org.apache.xerces.internal.impl.XMLDocumentFragmentScannerImpl.endEntity(XMLDocumentFragmentScannerImpl.java:904) - return false; - } - } - - /** - * Read the whole element including those contained in the outer element. - * @param startElement start element (tag) of the outer element. - * @param asString output builder that the element's inner-text - * will be appended to, with markup removed and - * characters un-escaped - * @param asXml output builder that full xml including markups - * will be appended to. Characters are escaped as - * needed. - * @throws IOException - * @throws XMLStreamException - */ - void readSubtree( - StartElement startElement, - StringBuilder asString, - StringWriter asXml) - throws IOException, XMLStreamException { - XMLEventWriter xmlWriter = XMLOutputFactory.newInstance(). 
- createXMLEventWriter(asXml); - XMLEvent xmlEvent = startElement; - int level = 0; - do { - xmlWriter.add(xmlEvent); - int eType = xmlEvent.getEventType(); - switch (eType) { - case XMLStreamConstants.START_ELEMENT: - level++; - break; - case XMLStreamConstants.END_ELEMENT: - if (--level == 0) { - xmlWriter.close(); - return; - } - break; - case XMLStreamConstants.CHARACTERS: - asString.append(xmlEvent.asCharacters().getData()); - default: - break; - } - xmlEvent = xmlReader.nextEvent(); - } while (xmlReader.hasNext()); - throw new RuntimeException("Invalid XML format."); - } -} - +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import javax.xml.namespace.QName; +import javax.xml.stream.*; +import javax.xml.stream.events.Attribute; +import javax.xml.stream.events.StartElement; +import javax.xml.stream.events.XMLEvent; +import java.io.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +/** + * The {@code ResultsReaderXml} class represents a streaming XML reader for + * Splunk search results. When a stream from an export search is passed to this + * reader, it skips any preview events in the stream. If you want to access the + * preview events, use the {@link MultiResultsReaderXml} class. 
+ */ +public class ResultsReaderXml + extends ResultsReader { + + private XMLEventReader xmlReader = null; + private ArrayList<String> fields = new ArrayList<>(); + private PushbackInputStream pushbackInputStream; + + /** + * Class constructor. + * + * Constructs a streaming XML reader for the event stream. You should only + * attempt to parse an XML stream with this reader. If you attempt to parse + * a different type of stream, unpredictable results may occur. + * <br> + * The pushback reader modifies export streams to generate non-strict XML + * at the beginning of the stream. The streaming reader ignores preview + * data, and only extracts finalized data. + * + * @param inputStream The XML stream to parse. + * @throws IOException The IOException instance + */ + public ResultsReaderXml(InputStream inputStream) throws IOException { + this(inputStream, false); + } + + ResultsReaderXml( + InputStream inputStream, + boolean isInMultiReader) + throws IOException { + super(inputStream, isInMultiReader); + + // We need to do read-ahead, so we have to use a PushbackInputStream for everything + // in this class. + this.pushbackInputStream = new PushbackInputStream(inputStream); + XMLInputFactory inputFactory = XMLInputFactory.newInstance(); + + int ch = this.pushbackInputStream.read(); + if (ch == -1) { + return; // Stream is empty. 
+ } else { + ((PushbackInputStream)this.pushbackInputStream).unread(ch); + } + + inputFactory.setProperty(XMLInputFactory.IS_COALESCING, true); + try { + InputStream filteredStream = new InsertRootElementFilterInputStream(this.pushbackInputStream); + xmlReader = inputFactory.createXMLEventReader(filteredStream); + finishInitialization(); + } catch (XMLStreamException e) { + throw new RuntimeException(e); + } + } + + /** {@inheritDoc} */ + @Override public void close() throws IOException { + if (xmlReader != null) { + try { + xmlReader.close(); + } catch (XMLStreamException e) { + throw new RuntimeException(e); + } + } + xmlReader = null; + + super.close(); + } + + /** {@inheritDoc} */ + public boolean isPreview() { + return isPreview; + } + + /** {@inheritDoc} */ + public Collection<String> getFields() { + return fields; + } + + @Override Event getNextEventInCurrentSet() throws IOException { + // Handle empty stream or other cases where xmlReader is + // not constructed. + if (xmlReader == null) { + return null; + } + try { + Event event = null; + XMLEvent xmlEvent = readToStartOfElementAtSameLevelWithName("result"); + if (xmlEvent != null) { + event = getResultKVPairs(); + } + return event; + } catch (XMLStreamException e) { + throw new RuntimeException(e); + } + } + + // Reads the preview flag and field name list, and position in the middle of + // the result element for reading actual results later. + // Return value indicates whether the next 'results' element is found. + boolean readIntoNextResultsElement() + throws XMLStreamException, IOException { + XMLEvent xmlEvent = readToStartOfElementWithName("results"); + if (xmlEvent == null) { + return false; + } + + if (xmlEvent != null && + xmlEvent.asStartElement() + .getAttributeByName(QName.valueOf("preview")) + .getValue() + .equals("0") ){ + isPreview = false; + } else { + isPreview = true; + } + + // Read <meta> element. 
+ final String meta = "meta"; + if (readToStartOfElementAtSameLevelWithName(meta) != null) { + readFieldOrderElement(); + readToEndElementWithName(meta); + } + return true; + } + + XMLEvent readToStartOfElementWithName(String elementName) + throws XMLStreamException { + while (xmlReader.hasNext()) { + XMLEvent xmlEvent = xmlReader.nextEvent(); + int eType = xmlEvent.getEventType(); + if (eType != XMLStreamConstants.START_ELEMENT){ + continue; + } + + StartElement startElement = xmlEvent.asStartElement(); + if(startElement + .getName() + .getLocalPart() + .equals(elementName)){ + return xmlEvent; + } + } + return null; + } + + void readToEndElementWithName(String elementName) throws XMLStreamException { + XMLEvent xmlEvent; + int eType; + + while (xmlReader.hasNext()) { + xmlEvent = xmlReader.nextEvent(); + eType = xmlEvent.getEventType(); + switch (eType) { + case XMLStreamConstants.START_ELEMENT: + break; + case XMLStreamConstants.END_ELEMENT: + if (xmlEvent.asEndElement() + .getName() + .getLocalPart() + .equals(elementName)) { + return; + } + break; + default: + break; + } + } + + throw new RuntimeException("End tag of " + elementName + " not found."); + } + + /** + * Reads to the next specified start element at the same level. The reader + * stops past that element if it is found. Otherwise, the reader stops + * before the end element of the current level. + * + * @param elementName The name of the start element. + * @return The start element, or {@code null} if not found. 
+ * @throws XMLStreamException + */ + XMLEvent readToStartOfElementAtSameLevelWithName(String elementName) + throws XMLStreamException { + XMLEvent xmlEvent; + int eType; + int level = 0; + while (xmlReader.hasNext()) { + xmlEvent = xmlReader.peek(); + eType = xmlEvent.getEventType(); + switch (eType) { + case XMLStreamConstants.START_ELEMENT: + if (level++ > 0){ + break; + } + StartElement startElement = xmlEvent.asStartElement(); + if (startElement + .getName() + .getLocalPart() + .equals(elementName)) { + xmlReader.nextEvent(); + return xmlEvent; + } + break; + case XMLStreamConstants.END_ELEMENT: + if (level-- == 0) { + return null; + } + break; + default: + break; + } + xmlReader.nextEvent(); + } + + throw new RuntimeException("Parent end element not found:" + elementName); + } + + // At the end, move off the end element of 'fieldOrder' + private void readFieldOrderElement() + throws IOException, XMLStreamException { + XMLEvent xmlEvent; + int eType; + int level = 0; + + if (readToStartOfElementAtSameLevelWithName("fieldOrder") == null) + return; + + while (xmlReader.hasNext()) { + xmlEvent = xmlReader.nextEvent(); + eType = xmlEvent.getEventType(); + switch (eType) { + case XMLStreamConstants.START_ELEMENT: + level++; + break; + case XMLStreamConstants.END_ELEMENT: + if (xmlEvent.asEndElement() + .getName() + .getLocalPart() + .equals("fieldOrder")) { + return; + } + level--; + break; + case XMLStreamConstants.CHARACTERS: + if (level == 1) { + fields.add(xmlEvent.asCharacters().getData()); + } + break; + default: + break; + } + } + + throw new RuntimeException("End tag of fieldOrder not found."); + } + + // At the end, move off the end tag of 'result' + private Event getResultKVPairs() + throws IOException, XMLStreamException { + + Event returnData = new Event(); + XMLEvent xmlEvent; + int eType; + String key = null; + List<String> values = new ArrayList<>(); + int level = 0; + + // Event results are flat, so extract k/v pairs based on XML indentation + // 
level throwing away the uninteresting non-data. + + while (xmlReader.hasNext()) { + xmlEvent = xmlReader.nextEvent(); + eType = xmlEvent.getEventType(); + switch (eType) { + case XMLStreamConstants.START_ELEMENT: + final StartElement startElement = xmlEvent.asStartElement(); + @SuppressWarnings("unchecked") + Iterator<Attribute> attrIttr = + startElement.getAttributes(); + if (level == 0) { + if (attrIttr.hasNext()) + key = attrIttr.next().getValue(); + } else if (level == 1 && + key.equals("_raw") && + startElement + .getName() + .getLocalPart() + .equals("v")) { + StringBuilder asString = new StringBuilder(); + StringWriter asXml = new StringWriter(); + readSubtree(startElement, asString, asXml); + values.add(asString.toString()); + returnData.putSegmentedRaw(asXml.toString()); + level--; + } + level++; + break; + case XMLStreamConstants.END_ELEMENT: + if (xmlEvent.asEndElement() + .getName() + .getLocalPart() + .equals("result")) + return returnData; + + if (--level == 0) { + String[] valuesArray = + values.toArray(new String[values.size()]); + returnData.putArray(key, valuesArray); + + key = null; + values.clear(); + } + break; + case XMLStreamConstants.CHARACTERS: + if (level > 1) { + values.add(xmlEvent.asCharacters().getData()); + } + break; + default: + break; + } + } + + throw new RuntimeException("End tag of 'result' not found."); + } + + @Override boolean advanceStreamToNextSet() throws IOException { + // Handle empty stream or other cases where xmlReader is + // not constructed. 
+ if (xmlReader == null) { + return false; + } + try { + return readIntoNextResultsElement(); + } catch (XMLStreamException e) { + throw new RuntimeException(e); + } catch (NullPointerException e) { + // Invalid xml (<doc> and multiple <results> may results in + // this exception in the xml reader with JDK 1.7 at: + // com.sun.org.apache.xerces.internal.impl.XMLEntityScanner.load(XMLEntityScanner.java:1748) + return false; + } catch (ArrayIndexOutOfBoundsException e) { + // Invalid xml (<doc> and multiple <results> may results in + // this exception in the xml reader with JDK 1.6 at: + // com.sun.org.apache.xerces.internal.impl.XMLDocumentFragmentScannerImpl.endEntity(XMLDocumentFragmentScannerImpl.java:904) + return false; + } + } + + /** + * Read the whole element including those contained in the outer element. + * @param startElement start element (tag) of the outer element. + * @param asString output builder that the element's inner-text + * will be appended to, with markup removed and + * characters un-escaped + * @param asXml output builder that full xml including markups + * will be appended to. Characters are escaped as + * needed. + * @throws IOException + * @throws XMLStreamException + */ + void readSubtree( + StartElement startElement, + StringBuilder asString, + StringWriter asXml) + throws IOException, XMLStreamException { + XMLEventWriter xmlWriter = XMLOutputFactory.newInstance(). 
+ createXMLEventWriter(asXml); + XMLEvent xmlEvent = startElement; + int level = 0; + do { + xmlWriter.add(xmlEvent); + int eType = xmlEvent.getEventType(); + switch (eType) { + case XMLStreamConstants.START_ELEMENT: + level++; + break; + case XMLStreamConstants.END_ELEMENT: + if (--level == 0) { + xmlWriter.close(); + return; + } + break; + case XMLStreamConstants.CHARACTERS: + asString.append(xmlEvent.asCharacters().getData()); + default: + break; + } + xmlEvent = xmlReader.nextEvent(); + } while (xmlReader.hasNext()); + throw new RuntimeException("Invalid XML format."); + } +} + diff --git a/splunk/src/main/java/com/splunk/SavedSearchCollectionArgs.java b/splunk/src/main/java/com/splunk/SavedSearchCollectionArgs.java index d846e19d..cdb62914 100644 --- a/splunk/src/main/java/com/splunk/SavedSearchCollectionArgs.java +++ b/splunk/src/main/java/com/splunk/SavedSearchCollectionArgs.java @@ -1,53 +1,53 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -/** - * The {@code SavedSearchCollectionArgs} class contains arguments getting a - * collection of saved searches. - */ -public class SavedSearchCollectionArgs extends CollectionArgs { - - /** - * Class constructor. - */ - public SavedSearchCollectionArgs() { super(); } - - /* BEGIN AUTOGENERATED CODE */ - - /** - * Sets the earliest time for which to display the scheduled times for scheduled searches (not just the next run time). 
- * - * @param earliestTime - * The earliest time. - */ - public void setEarliestTime(String earliestTime) { - this.put("earliest_time", earliestTime); - } - - /** - * Sets the latest time until which to display the scheduled times for scheduled searches (not just the next run time). - * - * @param latestTime - * The latest time. - */ - public void setLatestTime(String latestTime) { - this.put("latest_time", latestTime); - } - - /* END AUTOGENERATED CODE */ -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +/** + * The {@code SavedSearchCollectionArgs} class contains arguments getting a + * collection of saved searches. + */ +public class SavedSearchCollectionArgs extends CollectionArgs { + + /** + * Class constructor. + */ + public SavedSearchCollectionArgs() { super(); } + + /* BEGIN AUTOGENERATED CODE */ + + /** + * Sets the earliest time for which to display the scheduled times for scheduled searches (not just the next run time). + * + * @param earliestTime + * The earliest time. + */ + public void setEarliestTime(String earliestTime) { + this.put("earliest_time", earliestTime); + } + + /** + * Sets the latest time until which to display the scheduled times for scheduled searches (not just the next run time). + * + * @param latestTime + * The latest time. 
+ */ + public void setLatestTime(String latestTime) { + this.put("latest_time", latestTime); + } + + /* END AUTOGENERATED CODE */ +} diff --git a/splunk/src/main/java/com/splunk/SearchResults.java b/splunk/src/main/java/com/splunk/SearchResults.java index 0dfa6060..1feeb85f 100644 --- a/splunk/src/main/java/com/splunk/SearchResults.java +++ b/splunk/src/main/java/com/splunk/SearchResults.java @@ -1,38 +1,38 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.Collection; - -/** - * The {@code SearchResults} interface represents Splunk search results. - */ -public interface SearchResults extends Iterable<Event> { - /** - * Indicates whether the results are a preview from an unfinished search. - * @return {@code true} if the results are a preview, {@code false} if not. - */ - public boolean isPreview(); - - /** - * Returns a collection of field names from the results. - * @return A collection of field names. - * <p> - * Note that any given result will contain a subset of these fields. - */ - public Collection<String> getFields(); -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.Collection; + +/** + * The {@code SearchResults} interface represents Splunk search results. + */ +public interface SearchResults extends Iterable<Event> { + /** + * Indicates whether the results are a preview from an unfinished search. + * @return {@code true} if the results are a preview, {@code false} if not. + */ + public boolean isPreview(); + + /** + * Returns a collection of field names from the results. + * @return A collection of field names. + * <p> + * Note that any given result will contain a subset of these fields. + */ + public Collection<String> getFields(); +} diff --git a/splunk/src/main/java/com/splunk/Service.java b/splunk/src/main/java/com/splunk/Service.java index 0e006f11..c8087070 100644 --- a/splunk/src/main/java/com/splunk/Service.java +++ b/splunk/src/main/java/com/splunk/Service.java @@ -1,1477 +1,1477 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.splunk; - -import java.io.IOException; -import java.io.InputStream; -import java.io.UnsupportedEncodingException; -import java.net.*; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * The {@code Service} class represents a Splunk service instance at a given - * address (host:port), accessed using the {@code http} or {@code https} - * protocol scheme. - * <p> - * A {@code Service} instance also captures an optional namespace context - * consisting of an optional owner name (or "-" wildcard) and optional app name - * (or "-" wildcard). - * <p> - * To access {@code Service} members, the {@code Service} instance must be - * authenticated by presenting credentials using the {@code login} method, or - * by constructing the {@code Service} instance using the {@code connect} - * method, which both creates and authenticates the instance. - */ -public class Service extends BaseService { - /** The current app context. */ - protected String app = null; - - /** The current session token. */ - protected String token = null; - - /** The current owner context. A value of "nobody" means that all users - * have access to the resource. - */ - protected String owner = null; - - /** The Splunk account username, which is used to authenticate the Splunk - * instance. */ - protected String username = null; - - /** The password, which is used to authenticate the Splunk instance. */ - protected String password = null; - - /** The default simple receiver endpoint. */ - protected String simpleReceiverEndPoint = "/services/receivers/simple"; - - /** The default password endpoint, can change over Splunk versions. */ - protected String passwordEndPoint = "admin/passwords"; - - /** The version of this Splunk instance, once logged in. */ - public String version = null; - - /** The type of this Splunk instance, once logged in. 
*/ - public String instanceType = null; - - /** The default host name, which is used when a host name is not provided.*/ - public static String DEFAULT_HOST = "localhost"; - - /** The default port number, which is used when a port number is not - * provided. */ - public static int DEFAULT_PORT = 8089; - - /** The default scheme, which is used when a scheme is not provided. */ - public static String DEFAULT_SCHEME = "https"; - - /** Flag to notify SDK to try for re-login if the session has expired API call*/ - protected boolean autologin = false; - - /** - * Creates a new {@code Service} instance using a host. - * - * @param host The host name. - */ - public Service(String host) { - super(host); - } - - /** - * Creates a new {@code Service} instance using a host and port. - * - * @param host The host name. - * @param port The port number. - */ - public Service(String host, int port) { - super(host, port); - } - - /** - * Creates a new {@code Service} instance using a host, port, and - * scheme for accessing the service ({@code http} or {@code https}). - * - * @param host The host name. - * @param port The port number. - * @param scheme The scheme ({@code http} or {@code https}). - */ - public Service(String host, int port, String scheme) { - super(host, port, scheme); - } - - /** - * Constructs a new {@code Service} instance using the given host, - * port, and scheme, and instructing it to use the specified HTTPS handler. - * - * @param host The host name of the service. - * @param port The port number of the service. - * @param scheme Scheme for accessing the service ({@code http} or - * {@code https}). - * @param httpsHandler The URLStreamHandler instance - */ - public Service(String host, int port, String scheme, - URLStreamHandler httpsHandler) { - this.host = host; - this.port = port; - this.scheme = scheme; - this.httpsHandler = httpsHandler; - } - - /** - * Creates a new {@code Service} instance using a collection of arguments. 
- * - * @param args The {@code ServiceArgs} to initialize the service. - */ - // NOTE: This overload exists primarily to provide better documentation - // for the "args" parameter. - @SuppressWarnings("deprecation") - public Service(ServiceArgs args) { - super(); - // NOTE: Must read the deprecated fields for backward compatibility. - // (Consider the case where the fields are initialized directly, - // rather than using the new setters.) - // NOTE: Must also read the underlying dictionary for forward compatibility. - // (Consider the case where the user calls Map.put() directly, - // rather than using the new setters.) - this.app = Args.get(args, "app", args.app != null ? args.app : null); - this.host = Args.get(args, "host", args.host != null ? args.host : DEFAULT_HOST); - this.owner = Args.get(args, "owner", args.owner != null ? args.owner : null); - this.port = Args.<Integer>get(args, "port", args.port != null ? args.port : DEFAULT_PORT); - this.scheme = Args.get(args, "scheme", args.scheme != null ? args.scheme : DEFAULT_SCHEME); - this.token = Args.get(args, "token", args.token != null ? args.token : null); - this.username = (String)args.get("username"); - this.password = (String)args.get("password"); - this.autologin = Args.<Boolean>get(args, "autologin", false); - this.httpsHandler = Args.get(args, "httpsHandler", null); - this.setSslSecurityProtocol(Args.get(args, "SSLSecurityProtocol", Service.getSslSecurityProtocol())); - this.addCookie((String)args.get("cookie")); - this.setCustomHeaders((Map<String, String>) args.get("customHeaders")); - } - - /** - * Creates a new {@code Service} instance using a map of arguments. - * - * @param args A {@code Map} of arguments to initialize the service. 
- */ - public Service(Map<String, Object> args) { - super(); - this.app = Args.get(args, "app", null); - this.host = Args.get(args, "host", DEFAULT_HOST); - this.owner = Args.get(args, "owner", null); - this.port = Args.<Integer>get(args, "port", DEFAULT_PORT); - this.scheme = Args.get(args, "scheme", DEFAULT_SCHEME); - this.token = Args.get(args, "token", null); - this.username = (String)args.get("username"); - this.password = (String)args.get("password"); - this.autologin = Args.<Boolean>get(args, "autologin", false); - this.httpsHandler = Args.get(args, "httpsHandler", null); - this.setSslSecurityProtocol(Args.get(args, "SSLSecurityProtocol", Service.getSslSecurityProtocol())); - this.addCookie((String)args.get("cookie")); - this.connectTimeout = Args.<Integer>get(args, "connectTimeout", null); - this.readTimeout = Args.<Integer>get(args, "readTimeout", null); - } - - /** - * Establishes a connection to a Splunk service using a map of arguments. - * This member creates a new {@code Service} instance and authenticates - * the session using credentials passed in from the {@code args} map. - * - * @param args The {@code args} map. - * @return A new {@code Service} instance. - */ - public static Service connect(Map<String, Object> args) { - Service service = new Service(args); - if (args.containsKey("username")) { - service.login(); - } - return service; - } - - /** - * Runs an export search (using the {@code search/jobs/export} endpoint), - * and streams results back in an input stream. - * - * @param search The search query to run. - * @return The {@code InputStream} object that contains the search results. - */ - public InputStream export(String search) { - return export(search, null); - } - - /** - * Runs an export search with arguments (using the {@code search/jobs/export} - * endpoint), and streams results back in an input stream. - * - * @param search The search query to run. - * @param args Additional search arguments. 
- * For a list of possible parameters, see - * <a href="http://dev.splunk.com/view/SP-CAAAEHQ#savedsearchparams" - * target="_blank">Saved search parameters</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEHQ" - * target="_blank">dev.splunk.com</a>. - * @return The {@code InputStream} object that contains the search results. - */ - public InputStream export(String search, Map args) { - args = Args.create(args).add("search", search); - // By default don't highlight search terms in the output. - if (!args.containsKey("segmentation")) { - args.put("segmentation", "none"); - } - ResponseMessage response; - - if(enableV2SearchApi()) - response = post(JobCollection.REST_PATH_V2 + "/export", args); - else { - response = post(JobCollection.REST_PATH + "/export", args); - } - return new ExportResultsStream(response.getContent()); - } - - /** - * Runs an export search with arguments (using the {@code search/jobs/export} - * endpoint), and streams results back in an input stream. - * - * @param search The search query to run. - * @param args Additional search arguments (see {@code JobExportArgs}). - * @return The {@code InputStream} object that contains the search results. - */ - // NOTE: This overload exists primarily to provide better documentation - // for the "args" parameter. - public InputStream export(String search, JobExportArgs args) { - return export(search, (Map<String, Object>) args); - } - - /** - * Ensures that the given path is fully qualified, prepending a path - * prefix if necessary. The path prefix is constructed using the current - * owner and app context when available. - * - * @param path The path to verify. - * @return A fully-qualified resource path. - */ - String fullpath(String path) { - return fullpath(path, null); - } - - /** - * Ensures that a given path is fully qualified, prepending a path - * prefix if necessary. The path prefix is constructed using the - * current owner and app context when available. 
- * - * @param path The path to verify. - * @param namespace The namespace dictionary (<i>app, owner, sharing</i>). - * @return A fully-qualified resource path. - */ - public String fullpath(String path, Args namespace) { - - // if already fully qualified (i.e. root begins with /) then return - // the already qualified path. - if (path.startsWith("/")) - return path; - - // if no namespace at all, and no service instance of app, and no - // sharing, return base service endpoint + path. - if (namespace == null && app == null) { - return "/services/" + path; - } - - // base namespace values - String localApp = app; - String localOwner = owner; - String localSharing = ""; - - // override with invocation namespace if set. - try{ - if (namespace != null) { - // URL encode the owner and app. - if (namespace.containsKey("app")) { - localApp = URLEncoder.encode((String)namespace.get("app"), "UTF-8"); - } - if (namespace.containsKey("owner")) { - localOwner = URLEncoder.encode((String)namespace.get("owner"), "UTF-8"); - } - if (namespace.containsKey("sharing")) { - localSharing = (String)namespace.get("sharing"); - } - } - }catch (UnsupportedEncodingException e) { - // This is unreachable, since UTF-8 is always supported. - assert false; - } - - - // sharing, if set calls for special mapping, override here. - // "user" --> {user}/{app} - // "app" --> nobody/{app} - // "global" --> nobody/{app} - // "system" --> nobody/system - if (localSharing.equals("app") || localSharing.equals("global")) - localOwner = "nobody"; - else if (localSharing.equals("system")) { - localApp = "system"; - localOwner = "nobody"; - } - - return String.format("/servicesNS/%s/%s/%s", - localOwner == null ? "-" : localOwner, - localApp == null ? "-" : localApp, - path); - } - - /** - * Returns the app context for this {@code Service} instance. - * A {@code null} value indicates no app context, and a value of - * {@code "-"} indicates an app wildcard. - * - * @return The app context. 
- */ - public String getApp() { - return this.app; - } - - /** - * Returns the collection of applications. - * - * @return The application collection. - */ - public EntityCollection<Application> getApplications() { - return new EntityCollection<>( - this, "/services/apps/local", Application.class); - } - - /** - * Returns the collection of configurations. - * - * @return The configurations collection. - */ - public ConfCollection getConfs() { - return getConfs(null); - } - - /** - * Returns the collection of configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return The configurations collection. - */ - public ConfCollection getConfs(Args args) { - return new ConfCollection(this, args); - } - - /** - * Returns an array of system capabilities. - * - * @return An array of capabilities. - */ - public String[] getCapabilities() { - Entity caps = new Entity(this, "authorization/capabilities"); - return caps.getStringArray("capabilities"); - } - - /** - * Returns the collection of data models. - * @return DataModelCollection instance - */ - public DataModelCollection getDataModels() { - return new DataModelCollection(this); - } - - /** - * Returns the configuration and status of a deployment client. - * - * @return The configuration and status. - */ - public DeploymentClient getDeploymentClient() { - return new DeploymentClient(this); - } - - /** - * Returns the configuration of all deployment servers. - * - * @return The configuration of deployment servers. - */ - public EntityCollection<DeploymentServer> getDeploymentServers() { - return getDeploymentServers(null); - } - - /** - * Returns the collection of deployment servers. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return The configuration of deployment servers. 
- */ - public EntityCollection<DeploymentServer> getDeploymentServers(Args args) { - String path; - if (versionIsEarlierThan("6.0.0")) { - path = "deployment/server"; - } else { - path = ""; // TODO: Find out what this should be and fix it. - } - return new EntityCollection<>( - this, "deployment/server", DeploymentServer.class, args); - } - - /** - * Returns a collection of class configurations for a deployment server. - * - * @return A collection of class configurations. - */ - public EntityCollection<DeploymentServerClass> getDeploymentServerClasses(){ - return getDeploymentServerClasses(null); - } - - /** - * Returns a collection of class configurations for a deployment server. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of server class configurations. - */ - public EntityCollection<DeploymentServerClass> getDeploymentServerClasses( - Args args) { - String path; - if (versionIsEarlierThan("6.0.0")) { - path = "deployment/serverclass"; - } else { - path = "deployment/server/serverclasses"; - } - return new EntityCollection<>( - this, path, DeploymentServerClass.class, args); - } - - /** - * Returns a collection of multi-tenant configurations. - * - * @return A collection of multi-tenant configurations. - */ - public EntityCollection<DeploymentTenant> getDeploymentTenants() { - return getDeploymentTenants(null); - } - - /** - * Returns a collection of multi-tenant configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of multi-tenant configurations. - */ - public EntityCollection<DeploymentTenant> getDeploymentTenants(Args args) { - return new EntityCollection<>( - this, "deployment/tenants", DeploymentTenant.class, args); - } - - /** - * Returns information about distributed search options. 
- * - * @return Distributed search information. - */ - public DistributedConfiguration getDistributedConfiguration() { - return new DistributedConfiguration(this); - } - - /** - * Returns a collection of distributed search peers. A <i>search peer</i> - * is a Splunk server to which another Splunk server distributes searches. - * The Splunk server where the search originates is referred to as the - * <i>search head</i>. - * - * @return A collection of search peers. - */ - public EntityCollection<DistributedPeer> getDistributedPeers() { - return getDistributedPeers(null); - } - - /** - * Returns a collection of distributed search peers. A <i>search peer</i> - * is a Splunk server to which another Splunk server distributes searches. - * The Splunk server where the search originates is referred to as the - * <i>search head</i>. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of search peers. - */ - public EntityCollection<DistributedPeer> getDistributedPeers(Args args) { - return new EntityCollection<>( - this, "search/distributed/peers", DistributedPeer.class, args); - } - - - /** - * Returns a collection of saved event types. - * - * @return A collection of saved event types. - */ - public EventTypeCollection getEventTypes() { - return getEventTypes(null); - } - - /** - * Returns a collection of saved event types. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of saved event types. - */ - public EventTypeCollection getEventTypes(Args args) { - return new EventTypeCollection(this, args); - } - - /** - * Returns a collection of alerts that have been fired by the service. - * - * @return A collection of fired alerts. 
- */ - public FiredAlertGroupCollection getFiredAlertGroups() { - return getFiredAlertsGroups(null); - } - - /** - * Returns a collection of alerts that have been fired by the service. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of fired alerts. - */ - public FiredAlertGroupCollection getFiredAlertsGroups(Args args) { - return new FiredAlertGroupCollection(this, args); - } - - /** - * Returns a collection of Splunk indexes. - * - * @return A collection of indexes. - */ - public IndexCollection getIndexes() { - return getIndexes((IndexCollectionArgs)null); - } - - /** - * Returns a collection of Splunk indexes. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link IndexCollectionArgs}. - * @return A collection of indexes. - */ - // NOTE: This overload exists primarily to provide better documentation - // for the "args" parameter. - public IndexCollection getIndexes(IndexCollectionArgs args) { - return getIndexes((Args)args); - } - - /** - * Returns a collection of Splunk indexes. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link IndexCollectionArgs}. - * @return A collection of indexes. - */ - public IndexCollection getIndexes(Args args) { - return new IndexCollection(this, args); - } - - /** - * Returns information about the Splunk service. - * - * @return Splunk service information. - */ - public ServiceInfo getInfo() { - return new ServiceInfo(this); - } - - /** - * Returns list of all applicable Cluster Master Hosts for the SearchHead Service. - * - * @return List of Cluster Master Host(s). 
- */ - public List<String> getClusterMasters(){ - Entity caps = new Entity(this, "cluster/config"); - List<String> hosts = new ArrayList<>(); - try { - String clusterMasterURIs = caps.getString("master_uri"); - URL clusterMasterUrl; - //for multi-cluster environment, there might be more than cluster master for the searchHead - if(clusterMasterURIs.contains(",")){ - String[] masterURIs = clusterMasterURIs.split(","); - for(String uri : masterURIs){ - clusterMasterUrl = new URL(uri); - hosts.add(clusterMasterUrl.getHost()); - } - }else { - clusterMasterUrl = new URL(clusterMasterURIs); - hosts.add(clusterMasterUrl.getHost()); - } - return hosts; - } catch (MalformedURLException e) { - e.printStackTrace(); - } - return hosts; - } - - /** - * Returns a collection of configured inputs. - * - * @return A collection of inputs. - */ - public InputCollection getInputs() { - return getInputs(null); - } - - /** - * Returns a collection of configured inputs. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of inputs. - */ - public InputCollection getInputs(Args args) { - return new InputCollection(this, args); - } - - /** - * Returns a collection of current search jobs. - * - * @return A collection of search jobs. - */ - public JobCollection getJobs() { - return getJobs((CollectionArgs)null); - } - - /** - * Returns a collection of current search jobs. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of search jobs. - */ - // NOTE: This overload exists primarily to provide better documentation - // for the "args" parameter. - public JobCollection getJobs(CollectionArgs args) { - return getJobs((Args)args); - } - - /** - * Returns a collection of current search jobs. 
- * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of search jobs. - */ - public JobCollection getJobs(Args args) { - return new JobCollection(this, args); - } - - /** - * Returns a Job by the provided sid. - * - * @param sid The sid for a job. - * @return A Job. - */ - public Job getJob(String sid) { - return new Job(this, JobCollection.REST_PATH + "/" + sid); - } - - /** - * Returns a collection of license group configurations. - * - * @return A collection of license group configurations. - */ - public EntityCollection<LicenseGroup> getLicenseGroups() { - return getLicenseGroups(null); - } - - /** - * Returns a collection of license group configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of license group configurations. - */ - public EntityCollection<LicenseGroup> getLicenseGroups(Args args) { - return new EntityCollection<>( - this, "licenser/groups", LicenseGroup.class, args); - } - - /** - * Returns a collection of messages from the licenser. - * - * @return A collection of licenser messages. - */ - public EntityCollection<LicenseMessage> getLicenseMessages() { - return getLicenseMessages(null); - } - - /** - * Returns a collection of messages from the licenser. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of licenser messages. - */ - public EntityCollection<LicenseMessage> getLicenseMessages(Args args) { - return new EntityCollection<>( - this, "licenser/messages", LicenseMessage.class, args); - } - - /** - * Returns the current owner context for this {@code Service} instance. - * A value of {@code "-"} indicates a wildcard, and a {@code null} value - * indicates no owner context. 
- * - * @return The current owner context. - */ - public String getOwner() { - return this.owner; - } - - /** - * Returns a collection of licenser pool configurations. - * - * @return A collection of licenser pool configurations. - */ - public LicensePoolCollection getLicensePools() { - return getLicensePools(null); - } - - /** - * Returns a collection of licenser pool configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of licenser pool configurations. - */ - public LicensePoolCollection getLicensePools(Args args) { - return new LicensePoolCollection(this, args); - } - - /** - * Returns a collection of slaves reporting to this license master. - * - * @return A collection of licenser slaves. - */ - public EntityCollection<LicenseSlave> getLicenseSlaves() { - return getLicenseSlaves(null); - } - - /** - * Returns a collection of slaves reporting to this license master. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of licenser slaves. - */ - public EntityCollection<LicenseSlave> getLicenseSlaves(Args args) { - return new EntityCollection<>( - this, "licenser/slaves", LicenseSlave.class, args); - } - - /** - * Returns a collection of license stack configurations. - * - * @return A collection of license stack configurations. - */ - public EntityCollection<LicenseStack> getLicenseStacks() { - return getLicenseStacks(null); - } - - /** - * Returns a collection of license stack configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of license stack configurations. 
- */ - public EntityCollection<LicenseStack> getLicenseStacks(Args args) { - return new EntityCollection<>( - this, "licenser/stacks", LicenseStack.class, args); - } - - /** - * Returns a collection of licenses for this service. - * - * @return A collection of licenses. - */ - public EntityCollection<License> getLicenses() { - return getLicenses(null); - } - - /** - * Returns a collection of licenses for this service. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of licenses. - */ - public EntityCollection<License> getLicenses(Args args) { - return new EntityCollection<>( - this, "licenser/licenses", License.class, args); - } - - /** - * Returns a collection of service logging categories and their status. - * - * @return A collection of logging categories. - */ - public EntityCollection<Logger> getLoggers() { - return getLoggers(null); - } - - /** - * Returns a collection of service logging categories and their status. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of logging categories. - */ - public EntityCollection<Logger> getLoggers(Args args) { - return new EntityCollection<>( - this, "server/logger", Logger.class, args); - } - - /** - * Returns a collection of system messages. - * - * @return A collection of system messages. - */ - public MessageCollection getMessages() { - return getMessages(null); - } - - /** - * Returns a collection of system messages. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of system messages. - */ - public MessageCollection getMessages(Args args) { - return new MessageCollection(this, args); - } - - /** - * Returns a collection of modular inputs. 
- * - * @return A collection of modular inputs. - */ - public ResourceCollection<ModularInputKind> getModularInputKinds() { - return getModularInputKinds(null); - } - - /** - * Returns a collection of modular inputs. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of modular inputs. - */ - public ResourceCollection<ModularInputKind> getModularInputKinds(Args args) { - return new ResourceCollection<>( - this, "data/modular-inputs", ModularInputKind.class, args); - } - - /** - * Returns global TCP output properties. - * - * @return Global TCP output properties. - */ - public OutputDefault getOutputDefault() { - return new OutputDefault(this); - } - - /** - * Returns a collection of output group configurations. - * - * @return A collection of output group configurations. - */ - public EntityCollection<OutputGroup> getOutputGroups() { - return getOutputGroups(null); - } - - /** - * Returns a collection of output group configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of output group configurations. - */ - public EntityCollection<OutputGroup> getOutputGroups(Args args) { - return new EntityCollection<>( - this, "data/outputs/tcp/group", OutputGroup.class, args); - } - - /** - * Returns a collection of data-forwarding configurations. - * - * @return A collection of data-forwarding configurations. - */ - public EntityCollection<OutputServer> getOutputServers() { - return getOutputServers(null); - } - - /** - * Returns a collection of data-forwarding configurations. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of data-forwarding configurations. 
- */ - public EntityCollection<OutputServer> getOutputServers(Args args) { - return new EntityCollection<>( - this, "data/outputs/tcp/server", OutputServer.class, args); - } - - /** - * Returns a collection of configurations for forwarding data in standard - * syslog format. - * - * @return A collection of syslog forwarders. - */ - public EntityCollection<OutputSyslog> getOutputSyslogs() { - return getOutputSyslogs(null); - } - - /** - * Returns a collection of configurations for forwarding data in standard - * syslog format. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of syslog forwarders. - */ - public EntityCollection<OutputSyslog> getOutputSyslogs(Args args) { - return new EntityCollection<>( - this, "data/outputs/tcp/syslog", OutputSyslog.class, args); - } - - /** - * Returns the current password that was used to authenticate the session. - * - * @return The current password. - */ - public String getPassword() { - return this.password; - } - - /** - * Returns a collection of passwords. This collection is used for managing - * secure credentials. - * - * @return A collection of passwords. - */ - public PasswordCollection getPasswords() { - return getPasswords(null); - } - - /** - * Returns a collection of passwords. This collection is used for managing - * secure credentials. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of passwords. - */ - public PasswordCollection getPasswords(Args args) { - return new PasswordCollection(this, args); - } - - /** - * Returns the receiver object for the Splunk service. - * - * @return A Splunk receiver object. - */ - public Receiver getReceiver() { - return new Receiver(this); - } - - /** - * Returns a collection of Splunk user roles. - * - * @return A collection of user roles. 
- */ - public EntityCollection<Role> getRoles() { - return getRoles(null); - } - - /** - * Returns a collection of Splunk user roles. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of user roles. - */ - public EntityCollection<Role> getRoles(Args args) { - return new EntityCollection<>( - this, "authorization/roles", Role.class, args); - } - - /** - * Returns a collection of saved searches. - * - * @return A collection of saved searches. - */ - public SavedSearchCollection getSavedSearches() { - return getSavedSearches((SavedSearchCollectionArgs)null); - } - - /** - * Returns a collection of saved searches. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link SavedSearchCollectionArgs}. - * @return A collection of saved searches. - */ - // NOTE: This overload exists primarily to provide better documentation - // for the "args" parameter. - public SavedSearchCollection getSavedSearches(SavedSearchCollectionArgs args) { - return getSavedSearches((Args)args); - } - - /** - * Returns a collection of saved searches. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of saved searches. - */ - public SavedSearchCollection getSavedSearches(Args args) { - return new SavedSearchCollection(this, args); - } - - /** - * Returns a Saved Search by the provided title key. - * - * @param title The title for a job. - * @return A SavedSearch. - */ - public SavedSearch getSavedSearch(String title) { - return new SavedSearch(this, JobCollection.REST_PATH + "/" + title); - } - - /** - * Returns service configuration information for an instance of Splunk. - * - * @return Service configuration information. 
- */ - public Settings getSettings() { - return new Settings(this); - } - - /** - * Returns the current session token. Session tokens can be shared across - * multiple {@code Service} instances. - * - * @return The session token. - */ - public String getToken() { - return this.token; - } - - /** - * Returns a collection of in-progress oneshot uploads. - * - * @return A collection of in-progress oneshot uploads - */ - public EntityCollection<Upload> getUploads() { - return getUploads(null); - } - - /** - * Returns a collection of in-progress oneshot uploads. - * - * @param namespace This collection's namespace; there are no other - * optional arguments for this endpoint. - * @return A collection of in-progress oneshot uploads - */ - public EntityCollection<Upload> getUploads(Args namespace) { - return new EntityCollection<>( - this, "data/inputs/oneshot", Upload.class, namespace); - } - - /** - * Returns the Splunk account username that was used to authenticate the - * current session. - * - * @return The current username. - */ - public String getUsername() { - return this.username; - } - - /** - * Returns a collection of Splunk users. - * - * @return A collection of users. - */ - public UserCollection getUsers() { - return getUsers(null); - } - - /** - * Returns a collection of Splunk users. - * - * @param args Collection arguments that specify the number of entities to - * return and how to sort them. See {@link CollectionArgs}. - * @return A collection of users. - */ - public UserCollection getUsers(Args args) { - return new UserCollection(this, args); - } - - /** - * Authenticates the {@code Service} instance with the username and password - * that were specified when the instance was created. - * - * Three cases: - * 1. If we have a cookie, but are missing username and/or password, login is noop - * 2. If we don't have a cookie, and are missing username and/or password we can't login - * 3. 
Otherwise login as usual - * - * @return The current {@code Service} instance. - */ - public Service login() { - if (this.cookieStore.hasSplunkAuthCookie() && (this.username == null || this.password == null)) { - return this; - } - else if (this.username == null || this.password == null) { - throw new IllegalStateException("Missing username or password."); - } - else { - return login(this.username, this.password); - } - } - - /** - * Authenticates the {@code Service} instance with a specified username and - * password. Note that these values override any previously-set values for - * username and password. - * - * @param username The Splunk account username. - * @param password The password for the username. - * @return The current {@code Service} instance. - */ - public Service login(String username, String password) { - this.username = username; - this.password = password; - - Args args = new Args(); - args.put("username", username); - args.put("password", password); - args.put("cookie", "1"); - ResponseMessage response = post("/services/auth/login", args); - String sessionKey = Xml.parse(response.getContent()) - .getElementsByTagName("sessionKey") - .item(0) - .getTextContent(); - this.token = "Splunk " + sessionKey; - ServiceInfo serviceInfoEntity = this.getInfo(); - this.version = serviceInfoEntity.getVersion(); - this.instanceType = serviceInfoEntity.getInstanceType(); - if (versionCompare("4.3") >= 0) - this.passwordEndPoint = "storage/passwords"; - - return this; - } - - /** - * Forgets the current session token. - * - * @return The current {@code Service} instance. - */ - public Service logout() { - this.token = null; - this.removeAllCookies(); - return this; - } - - /** - * Creates a oneshot synchronous search. - * - * @param query The search query. - * @return The search results. - */ - public InputStream oneshotSearch(String query) { - return oneshotSearch(query, null); - } - - /** - * Creates a oneshot synchronous search using search arguments. 
- * - * @param query The search query. - * @param args The search arguments:<ul> - * <li>"output_mode": Specifies the output format of the results (XML, JSON, - * or CSV).</li> - * <li>"earliest_time": Specifies the earliest time in the time range to - * search. The time string can be a UTC time (with fractional seconds), a - * relative time specifier (to now), or a formatted time string.</li> - * <li>"latest_time": Specifies the latest time in the time range to search. - * The time string can be a UTC time (with fractional seconds), a relative - * time specifier (to now), or a formatted time string.</li> - * <li>"rf": Specifies one or more fields to add to the search.</li></ul> - * @return The search results. - */ - public InputStream oneshotSearch(String query, Map args) { - args = Args.create(args); - args.put("search", query); - args.put("exec_mode", "oneshot"); - - // By default, don't highlight search terms in the search output. - if (!args.containsKey("segmentation")) { - args.put("segmentation", "none"); - } - - ResponseMessage response = post(JobCollection.REST_PATH, args); - return response.getContent(); - } - - /** - * Creates a oneshot synchronous search using search arguments. - * - * @param query The search query. - * @param args The search arguments:<ul> - * <li>"output_mode": Specifies the output format of the results (XML, JSON, - * or CSV).</li> - * <li>"earliest_time": Specifies the earliest time in the time range to - * search. The time string can be a UTC time (with fractional seconds), a - * relative time specifier (to now), or a formatted time string.</li> - * <li>"latest_time": Specifies the latest time in the time range to search. - * The time string can be a UTC time (with fractional seconds), a relative - * time specifier (to now), or a formatted time string.</li> - * <li>"rf": Specifies one or more fields to add to the search.</li></ul> - * @return The search results. 
- */ - public InputStream oneshotSearch(String query, Args args) { - return oneshotSearch(query, (Map<String, Object>)args); - } - - /** - * Opens a raw socket to this service. - * - * @param port The port to open. This port must already have been - * created as an allowable TCP input to the service. - * @return The socket. - * @throws java.io.IOException The IOException instance - */ - public Socket open(int port) throws IOException { - return new Socket(this.host, port); - } - - /** - * Parses a search query and returns a semantic map for the search in JSON - * format. - * - * @param query The search query. - * @return The parse response message. - */ - public ResponseMessage parse(String query) { - return parse(query, null); - } - - /** - * Parses a search query with additional arguments and returns a semantic - * map for the search in JSON format. - * - * @param query The search query. - * @param args Additional parse arguments. - * @return The parse response message. - */ - public ResponseMessage parse(String query, Map args) { - args = Args.create(args).add("q", query); - - if(enableV2SearchApi()) - return post("search/v2/parser", args); - else - return get("search/parser", args); - } - - /** - * Restarts the service. The service will be unavailable until it has - * successfully restarted. - * - * @return The restart response message. - */ - public ResponseMessage restart() { - return post("server/control/restart"); - } - - /** - * Creates an asynchronous search using the given query. Use this - * method for simple searches. - * - * @param query The search query. - * @return The search job. - */ - public Job search(String query) { - return search(query, null); - } - - /** - * Creates an asynchronous search job using the given query and - * search arguments. - * - * @param query The search query. - * @param args The search arguments. - * @return The search job. 
- */ - public Job search(String query, Map<String, Object> args) { - args = Args.create(args); - - return this.getJobs().create(query, args); - } - - /** - * Issues an HTTP request against the service using a request path and - * message. - * This method overrides the base {@code HttpService.send} method - * and applies the Splunk authorization header, which is required for - * authenticated interactions with the Splunk service. - * - * @param path The request path. - * @param request The request message. - * @return The HTTP response. - */ - @Override public ResponseMessage send(String path, RequestMessage request) { - // cookieStore is a protected member of HttpService - if (token != null && !cookieStore.hasSplunkAuthCookie() ) { - request.getHeader().put("Authorization", token); - } - ResponseMessage responseMessage = super.send(fullpath(path), request); - if(responseMessage.getStatus() == 401 && (this.autologin && this.username!= null && this.password != null)){ - // Executing re-login to renew the session token. - this.login(this.username, this.password); - responseMessage = super.send(fullpath(path), request); - }else if(responseMessage.getStatus() >= 400){ - //if autologin is not set to true or username/password is not set, throw HTTPException - throw HttpException.create(responseMessage); - } - return responseMessage; - } - - /** - * Provides a session token for use by this {@code Service} instance. - * Session tokens can be shared across multiple {@code Service} instances. - * - * @param value The session token, which is a basic authorization header in - * the format "Basic <i>sessiontoken</i>", where <i>sessiontoken</i> is the - * Base64-encoded "username:password" string. - */ - public void setToken(String value) { - this.token = value; - } - - /** - * Provides a session token having <b>Splunk</b> added before token. - * This method is specifically used when user just have token value. 
- * - * @param value The token value - */ - public void setSplunkToken(String value) { - this.token = value.contains("Splunk") ? value : "Splunk " + value; - } - - /** - * Provides a session token having <b>Bearer</b> added before token. - * This method is specifically used when user just have token value. - * - * @param value The token value - */ - public void setBearerToken(String value) { - this.token = value.contains("Splunk") || value.contains("Bearer") ? value : "Bearer " + value; - } - - - public boolean enableV2SearchApi(){ - if(null == this.instanceType){ - this.instanceType = this.getInfo().getInstanceType(); - } - if(this.instanceType.equalsIgnoreCase("cloud")) { - return versionIsAtLeast("9.0.2209"); - }else{ - return versionIsAtLeast("9.0.2"); - } - } - - /** - * Returns true if this Splunk instance's version is no earlier than - * the version specified in {@code version}. - * - * So when called on a Splunk 4.3.2 instance: - * * {@code versionIsAtLeast("4.3.2")} is {@code true}. - * * {@code versionIsAtLeast("4.1.0")} is {@code true}. - * * {@code versionIsAtLeast("5.0.0")} is {@code false}. - * - * @param version The version to compare this Splunk instance's version against. - * @return {@code true} if this Splunk instance's version is equal or - * greater than {@code version}; {@code false} otherwise. - */ - boolean versionIsAtLeast(String version) { - return versionCompare(version) >= 0; - } - - /** - * Returns true if this Splunk instance's version is earlier than - * the version specified in {@code version}. - * - * So when called on a Splunk 4.3.2 instance: - * * {@code versionIsEarlierThan("4.3.2")} is {@code false}. - * * {@code versionIsEarlierThan("4.1.0")} is {@code false}. - * * {@code versionIsEarlierThan("5.0.0")} is {@code true}. - * - * @param version The version to compare this Splunk instance's version against. - * @return {@code true} if this Splunk instance's version is less - * than {@code version}; {@code false} otherwise. 
- */ - boolean versionIsEarlierThan(String version) { - return versionCompare(version) < 0; - } - - /** - * Returns a value indicating how the version of this Splunk instance - * compares to a given version: - * <ul> - * <li>{@code -1 if this version < the given version}</li> - * <li>{@code 0 if this version = the given version}</li> - * <li>{@code 1 if this version > the given version}</li> - * </ul> - * - * @param otherVersion The other version to compare to. - * @return -1 if this version is less than, 0 if this version is equal to, - * or 1 if this version is greater than the given version. - */ - public int versionCompare(String otherVersion) { - if(null == this.version){ - this.version = this.getInfo().getVersion(); - } - String[] components1 = this.version.split("\\."); - String[] components2 = otherVersion.split("\\."); - int numComponents = Math.max(components1.length, components2.length); - - for (int i = 0; i < numComponents; i++) { - int c1 = (i < components1.length) - ? Integer.parseInt(components1[i], 10) : 0; - int c2 = (i < components2.length) - ? Integer.parseInt(components2[i], 10) : 0; - if (c1 < c2) { - return -1; - } else if (c1 > c2) { - return 1; - } - } - return 0; - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * The {@code Service} class represents a Splunk service instance at a given + * address (host:port), accessed using the {@code http} or {@code https} + * protocol scheme. + * <p> + * A {@code Service} instance also captures an optional namespace context + * consisting of an optional owner name (or "-" wildcard) and optional app name + * (or "-" wildcard). + * <p> + * To access {@code Service} members, the {@code Service} instance must be + * authenticated by presenting credentials using the {@code login} method, or + * by constructing the {@code Service} instance using the {@code connect} + * method, which both creates and authenticates the instance. + */ +public class Service extends BaseService { + /** The current app context. */ + protected String app = null; + + /** The current session token. */ + protected String token = null; + + /** The current owner context. A value of "nobody" means that all users + * have access to the resource. + */ + protected String owner = null; + + /** The Splunk account username, which is used to authenticate the Splunk + * instance. */ + protected String username = null; + + /** The password, which is used to authenticate the Splunk instance. */ + protected String password = null; + + /** The default simple receiver endpoint. */ + protected String simpleReceiverEndPoint = "/services/receivers/simple"; + + /** The default password endpoint, can change over Splunk versions. */ + protected String passwordEndPoint = "admin/passwords"; + + /** The version of this Splunk instance, once logged in. */ + public String version = null; + + /** The type of this Splunk instance, once logged in. 
*/ + public String instanceType = null; + + /** The default host name, which is used when a host name is not provided.*/ + public static String DEFAULT_HOST = "localhost"; + + /** The default port number, which is used when a port number is not + * provided. */ + public static int DEFAULT_PORT = 8089; + + /** The default scheme, which is used when a scheme is not provided. */ + public static String DEFAULT_SCHEME = "https"; + + /** Flag to notify SDK to try for re-login if the session has expired API call*/ + protected boolean autologin = false; + + /** + * Creates a new {@code Service} instance using a host. + * + * @param host The host name. + */ + public Service(String host) { + super(host); + } + + /** + * Creates a new {@code Service} instance using a host and port. + * + * @param host The host name. + * @param port The port number. + */ + public Service(String host, int port) { + super(host, port); + } + + /** + * Creates a new {@code Service} instance using a host, port, and + * scheme for accessing the service ({@code http} or {@code https}). + * + * @param host The host name. + * @param port The port number. + * @param scheme The scheme ({@code http} or {@code https}). + */ + public Service(String host, int port, String scheme) { + super(host, port, scheme); + } + + /** + * Constructs a new {@code Service} instance using the given host, + * port, and scheme, and instructing it to use the specified HTTPS handler. + * + * @param host The host name of the service. + * @param port The port number of the service. + * @param scheme Scheme for accessing the service ({@code http} or + * {@code https}). + * @param httpsHandler The URLStreamHandler instance + */ + public Service(String host, int port, String scheme, + URLStreamHandler httpsHandler) { + this.host = host; + this.port = port; + this.scheme = scheme; + this.httpsHandler = httpsHandler; + } + + /** + * Creates a new {@code Service} instance using a collection of arguments. 
+ * + * @param args The {@code ServiceArgs} to initialize the service. + */ + // NOTE: This overload exists primarily to provide better documentation + // for the "args" parameter. + @SuppressWarnings("deprecation") + public Service(ServiceArgs args) { + super(); + // NOTE: Must read the deprecated fields for backward compatibility. + // (Consider the case where the fields are initialized directly, + // rather than using the new setters.) + // NOTE: Must also read the underlying dictionary for forward compatibility. + // (Consider the case where the user calls Map.put() directly, + // rather than using the new setters.) + this.app = Args.get(args, "app", args.app != null ? args.app : null); + this.host = Args.get(args, "host", args.host != null ? args.host : DEFAULT_HOST); + this.owner = Args.get(args, "owner", args.owner != null ? args.owner : null); + this.port = Args.<Integer>get(args, "port", args.port != null ? args.port : DEFAULT_PORT); + this.scheme = Args.get(args, "scheme", args.scheme != null ? args.scheme : DEFAULT_SCHEME); + this.token = Args.get(args, "token", args.token != null ? args.token : null); + this.username = (String)args.get("username"); + this.password = (String)args.get("password"); + this.autologin = Args.<Boolean>get(args, "autologin", false); + this.httpsHandler = Args.get(args, "httpsHandler", null); + this.setSslSecurityProtocol(Args.get(args, "SSLSecurityProtocol", Service.getSslSecurityProtocol())); + this.addCookie((String)args.get("cookie")); + this.setCustomHeaders((Map<String, String>) args.get("customHeaders")); + } + + /** + * Creates a new {@code Service} instance using a map of arguments. + * + * @param args A {@code Map} of arguments to initialize the service. 
+ */ + public Service(Map<String, Object> args) { + super(); + this.app = Args.get(args, "app", null); + this.host = Args.get(args, "host", DEFAULT_HOST); + this.owner = Args.get(args, "owner", null); + this.port = Args.<Integer>get(args, "port", DEFAULT_PORT); + this.scheme = Args.get(args, "scheme", DEFAULT_SCHEME); + this.token = Args.get(args, "token", null); + this.username = (String)args.get("username"); + this.password = (String)args.get("password"); + this.autologin = Args.<Boolean>get(args, "autologin", false); + this.httpsHandler = Args.get(args, "httpsHandler", null); + this.setSslSecurityProtocol(Args.get(args, "SSLSecurityProtocol", Service.getSslSecurityProtocol())); + this.addCookie((String)args.get("cookie")); + this.connectTimeout = Args.<Integer>get(args, "connectTimeout", null); + this.readTimeout = Args.<Integer>get(args, "readTimeout", null); + } + + /** + * Establishes a connection to a Splunk service using a map of arguments. + * This member creates a new {@code Service} instance and authenticates + * the session using credentials passed in from the {@code args} map. + * + * @param args The {@code args} map. + * @return A new {@code Service} instance. + */ + public static Service connect(Map<String, Object> args) { + Service service = new Service(args); + if (args.containsKey("username")) { + service.login(); + } + return service; + } + + /** + * Runs an export search (using the {@code search/jobs/export} endpoint), + * and streams results back in an input stream. + * + * @param search The search query to run. + * @return The {@code InputStream} object that contains the search results. + */ + public InputStream export(String search) { + return export(search, null); + } + + /** + * Runs an export search with arguments (using the {@code search/jobs/export} + * endpoint), and streams results back in an input stream. + * + * @param search The search query to run. + * @param args Additional search arguments. 
+ * For a list of possible parameters, see + * <a href="http://dev.splunk.com/view/SP-CAAAEHQ#savedsearchparams" + * target="_blank">Saved search parameters</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEHQ" + * target="_blank">dev.splunk.com</a>. + * @return The {@code InputStream} object that contains the search results. + */ + public InputStream export(String search, Map args) { + args = Args.create(args).add("search", search); + // By default don't highlight search terms in the output. + if (!args.containsKey("segmentation")) { + args.put("segmentation", "none"); + } + ResponseMessage response; + + if(enableV2SearchApi()) + response = post(JobCollection.REST_PATH_V2 + "/export", args); + else { + response = post(JobCollection.REST_PATH + "/export", args); + } + return new ExportResultsStream(response.getContent()); + } + + /** + * Runs an export search with arguments (using the {@code search/jobs/export} + * endpoint), and streams results back in an input stream. + * + * @param search The search query to run. + * @param args Additional search arguments (see {@code JobExportArgs}). + * @return The {@code InputStream} object that contains the search results. + */ + // NOTE: This overload exists primarily to provide better documentation + // for the "args" parameter. + public InputStream export(String search, JobExportArgs args) { + return export(search, (Map<String, Object>) args); + } + + /** + * Ensures that the given path is fully qualified, prepending a path + * prefix if necessary. The path prefix is constructed using the current + * owner and app context when available. + * + * @param path The path to verify. + * @return A fully-qualified resource path. + */ + String fullpath(String path) { + return fullpath(path, null); + } + + /** + * Ensures that a given path is fully qualified, prepending a path + * prefix if necessary. The path prefix is constructed using the + * current owner and app context when available. 
+ * + * @param path The path to verify. + * @param namespace The namespace dictionary (<i>app, owner, sharing</i>). + * @return A fully-qualified resource path. + */ + public String fullpath(String path, Args namespace) { + + // if already fully qualified (i.e. root begins with /) then return + // the already qualified path. + if (path.startsWith("/")) + return path; + + // if no namespace at all, and no service instance of app, and no + // sharing, return base service endpoint + path. + if (namespace == null && app == null) { + return "/services/" + path; + } + + // base namespace values + String localApp = app; + String localOwner = owner; + String localSharing = ""; + + // override with invocation namespace if set. + try{ + if (namespace != null) { + // URL encode the owner and app. + if (namespace.containsKey("app")) { + localApp = URLEncoder.encode((String)namespace.get("app"), "UTF-8"); + } + if (namespace.containsKey("owner")) { + localOwner = URLEncoder.encode((String)namespace.get("owner"), "UTF-8"); + } + if (namespace.containsKey("sharing")) { + localSharing = (String)namespace.get("sharing"); + } + } + }catch (UnsupportedEncodingException e) { + // This is unreachable, since UTF-8 is always supported. + assert false; + } + + + // sharing, if set calls for special mapping, override here. + // "user" --> {user}/{app} + // "app" --> nobody/{app} + // "global" --> nobody/{app} + // "system" --> nobody/system + if (localSharing.equals("app") || localSharing.equals("global")) + localOwner = "nobody"; + else if (localSharing.equals("system")) { + localApp = "system"; + localOwner = "nobody"; + } + + return String.format("/servicesNS/%s/%s/%s", + localOwner == null ? "-" : localOwner, + localApp == null ? "-" : localApp, + path); + } + + /** + * Returns the app context for this {@code Service} instance. + * A {@code null} value indicates no app context, and a value of + * {@code "-"} indicates an app wildcard. + * + * @return The app context. 
+ */ + public String getApp() { + return this.app; + } + + /** + * Returns the collection of applications. + * + * @return The application collection. + */ + public EntityCollection<Application> getApplications() { + return new EntityCollection<>( + this, "/services/apps/local", Application.class); + } + + /** + * Returns the collection of configurations. + * + * @return The configurations collection. + */ + public ConfCollection getConfs() { + return getConfs(null); + } + + /** + * Returns the collection of configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return The configurations collection. + */ + public ConfCollection getConfs(Args args) { + return new ConfCollection(this, args); + } + + /** + * Returns an array of system capabilities. + * + * @return An array of capabilities. + */ + public String[] getCapabilities() { + Entity caps = new Entity(this, "authorization/capabilities"); + return caps.getStringArray("capabilities"); + } + + /** + * Returns the collection of data models. + * @return DataModelCollection instance + */ + public DataModelCollection getDataModels() { + return new DataModelCollection(this); + } + + /** + * Returns the configuration and status of a deployment client. + * + * @return The configuration and status. + */ + public DeploymentClient getDeploymentClient() { + return new DeploymentClient(this); + } + + /** + * Returns the configuration of all deployment servers. + * + * @return The configuration of deployment servers. + */ + public EntityCollection<DeploymentServer> getDeploymentServers() { + return getDeploymentServers(null); + } + + /** + * Returns the collection of deployment servers. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return The configuration of deployment servers. 
+ */ + public EntityCollection<DeploymentServer> getDeploymentServers(Args args) { + String path; + if (versionIsEarlierThan("6.0.0")) { + path = "deployment/server"; + } else { + path = ""; // TODO: Find out what this should be and fix it. + } + return new EntityCollection<>( + this, "deployment/server", DeploymentServer.class, args); + } + + /** + * Returns a collection of class configurations for a deployment server. + * + * @return A collection of class configurations. + */ + public EntityCollection<DeploymentServerClass> getDeploymentServerClasses(){ + return getDeploymentServerClasses(null); + } + + /** + * Returns a collection of class configurations for a deployment server. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of server class configurations. + */ + public EntityCollection<DeploymentServerClass> getDeploymentServerClasses( + Args args) { + String path; + if (versionIsEarlierThan("6.0.0")) { + path = "deployment/serverclass"; + } else { + path = "deployment/server/serverclasses"; + } + return new EntityCollection<>( + this, path, DeploymentServerClass.class, args); + } + + /** + * Returns a collection of multi-tenant configurations. + * + * @return A collection of multi-tenant configurations. + */ + public EntityCollection<DeploymentTenant> getDeploymentTenants() { + return getDeploymentTenants(null); + } + + /** + * Returns a collection of multi-tenant configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of multi-tenant configurations. + */ + public EntityCollection<DeploymentTenant> getDeploymentTenants(Args args) { + return new EntityCollection<>( + this, "deployment/tenants", DeploymentTenant.class, args); + } + + /** + * Returns information about distributed search options. 
+ * + * @return Distributed search information. + */ + public DistributedConfiguration getDistributedConfiguration() { + return new DistributedConfiguration(this); + } + + /** + * Returns a collection of distributed search peers. A <i>search peer</i> + * is a Splunk server to which another Splunk server distributes searches. + * The Splunk server where the search originates is referred to as the + * <i>search head</i>. + * + * @return A collection of search peers. + */ + public EntityCollection<DistributedPeer> getDistributedPeers() { + return getDistributedPeers(null); + } + + /** + * Returns a collection of distributed search peers. A <i>search peer</i> + * is a Splunk server to which another Splunk server distributes searches. + * The Splunk server where the search originates is referred to as the + * <i>search head</i>. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of search peers. + */ + public EntityCollection<DistributedPeer> getDistributedPeers(Args args) { + return new EntityCollection<>( + this, "search/distributed/peers", DistributedPeer.class, args); + } + + + /** + * Returns a collection of saved event types. + * + * @return A collection of saved event types. + */ + public EventTypeCollection getEventTypes() { + return getEventTypes(null); + } + + /** + * Returns a collection of saved event types. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of saved event types. + */ + public EventTypeCollection getEventTypes(Args args) { + return new EventTypeCollection(this, args); + } + + /** + * Returns a collection of alerts that have been fired by the service. + * + * @return A collection of fired alerts. 
+ */ + public FiredAlertGroupCollection getFiredAlertGroups() { + return getFiredAlertsGroups(null); + } + + /** + * Returns a collection of alerts that have been fired by the service. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of fired alerts. + */ + public FiredAlertGroupCollection getFiredAlertsGroups(Args args) { + return new FiredAlertGroupCollection(this, args); + } + + /** + * Returns a collection of Splunk indexes. + * + * @return A collection of indexes. + */ + public IndexCollection getIndexes() { + return getIndexes((IndexCollectionArgs)null); + } + + /** + * Returns a collection of Splunk indexes. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link IndexCollectionArgs}. + * @return A collection of indexes. + */ + // NOTE: This overload exists primarily to provide better documentation + // for the "args" parameter. + public IndexCollection getIndexes(IndexCollectionArgs args) { + return getIndexes((Args)args); + } + + /** + * Returns a collection of Splunk indexes. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link IndexCollectionArgs}. + * @return A collection of indexes. + */ + public IndexCollection getIndexes(Args args) { + return new IndexCollection(this, args); + } + + /** + * Returns information about the Splunk service. + * + * @return Splunk service information. + */ + public ServiceInfo getInfo() { + return new ServiceInfo(this); + } + + /** + * Returns list of all applicable Cluster Master Hosts for the SearchHead Service. + * + * @return List of Cluster Master Host(s). 
+ */ + public List<String> getClusterMasters(){ + Entity caps = new Entity(this, "cluster/config"); + List<String> hosts = new ArrayList<>(); + try { + String clusterMasterURIs = caps.getString("master_uri"); + URL clusterMasterUrl; + //for multi-cluster environment, there might be more than cluster master for the searchHead + if(clusterMasterURIs.contains(",")){ + String[] masterURIs = clusterMasterURIs.split(","); + for(String uri : masterURIs){ + clusterMasterUrl = new URL(uri); + hosts.add(clusterMasterUrl.getHost()); + } + }else { + clusterMasterUrl = new URL(clusterMasterURIs); + hosts.add(clusterMasterUrl.getHost()); + } + return hosts; + } catch (MalformedURLException e) { + e.printStackTrace(); + } + return hosts; + } + + /** + * Returns a collection of configured inputs. + * + * @return A collection of inputs. + */ + public InputCollection getInputs() { + return getInputs(null); + } + + /** + * Returns a collection of configured inputs. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of inputs. + */ + public InputCollection getInputs(Args args) { + return new InputCollection(this, args); + } + + /** + * Returns a collection of current search jobs. + * + * @return A collection of search jobs. + */ + public JobCollection getJobs() { + return getJobs((CollectionArgs)null); + } + + /** + * Returns a collection of current search jobs. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of search jobs. + */ + // NOTE: This overload exists primarily to provide better documentation + // for the "args" parameter. + public JobCollection getJobs(CollectionArgs args) { + return getJobs((Args)args); + } + + /** + * Returns a collection of current search jobs. 
+ * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of search jobs. + */ + public JobCollection getJobs(Args args) { + return new JobCollection(this, args); + } + + /** + * Returns a Job by the provided sid. + * + * @param sid The sid for a job. + * @return A Job. + */ + public Job getJob(String sid) { + return new Job(this, JobCollection.REST_PATH + "/" + sid); + } + + /** + * Returns a collection of license group configurations. + * + * @return A collection of license group configurations. + */ + public EntityCollection<LicenseGroup> getLicenseGroups() { + return getLicenseGroups(null); + } + + /** + * Returns a collection of license group configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of license group configurations. + */ + public EntityCollection<LicenseGroup> getLicenseGroups(Args args) { + return new EntityCollection<>( + this, "licenser/groups", LicenseGroup.class, args); + } + + /** + * Returns a collection of messages from the licenser. + * + * @return A collection of licenser messages. + */ + public EntityCollection<LicenseMessage> getLicenseMessages() { + return getLicenseMessages(null); + } + + /** + * Returns a collection of messages from the licenser. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of licenser messages. + */ + public EntityCollection<LicenseMessage> getLicenseMessages(Args args) { + return new EntityCollection<>( + this, "licenser/messages", LicenseMessage.class, args); + } + + /** + * Returns the current owner context for this {@code Service} instance. + * A value of {@code "-"} indicates a wildcard, and a {@code null} value + * indicates no owner context. 
+ * + * @return The current owner context. + */ + public String getOwner() { + return this.owner; + } + + /** + * Returns a collection of licenser pool configurations. + * + * @return A collection of licenser pool configurations. + */ + public LicensePoolCollection getLicensePools() { + return getLicensePools(null); + } + + /** + * Returns a collection of licenser pool configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of licenser pool configurations. + */ + public LicensePoolCollection getLicensePools(Args args) { + return new LicensePoolCollection(this, args); + } + + /** + * Returns a collection of slaves reporting to this license master. + * + * @return A collection of licenser slaves. + */ + public EntityCollection<LicenseSlave> getLicenseSlaves() { + return getLicenseSlaves(null); + } + + /** + * Returns a collection of slaves reporting to this license master. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of licenser slaves. + */ + public EntityCollection<LicenseSlave> getLicenseSlaves(Args args) { + return new EntityCollection<>( + this, "licenser/slaves", LicenseSlave.class, args); + } + + /** + * Returns a collection of license stack configurations. + * + * @return A collection of license stack configurations. + */ + public EntityCollection<LicenseStack> getLicenseStacks() { + return getLicenseStacks(null); + } + + /** + * Returns a collection of license stack configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of license stack configurations. 
+ */ + public EntityCollection<LicenseStack> getLicenseStacks(Args args) { + return new EntityCollection<>( + this, "licenser/stacks", LicenseStack.class, args); + } + + /** + * Returns a collection of licenses for this service. + * + * @return A collection of licenses. + */ + public EntityCollection<License> getLicenses() { + return getLicenses(null); + } + + /** + * Returns a collection of licenses for this service. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of licenses. + */ + public EntityCollection<License> getLicenses(Args args) { + return new EntityCollection<>( + this, "licenser/licenses", License.class, args); + } + + /** + * Returns a collection of service logging categories and their status. + * + * @return A collection of logging categories. + */ + public EntityCollection<Logger> getLoggers() { + return getLoggers(null); + } + + /** + * Returns a collection of service logging categories and their status. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of logging categories. + */ + public EntityCollection<Logger> getLoggers(Args args) { + return new EntityCollection<>( + this, "server/logger", Logger.class, args); + } + + /** + * Returns a collection of system messages. + * + * @return A collection of system messages. + */ + public MessageCollection getMessages() { + return getMessages(null); + } + + /** + * Returns a collection of system messages. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of system messages. + */ + public MessageCollection getMessages(Args args) { + return new MessageCollection(this, args); + } + + /** + * Returns a collection of modular inputs. 
+ * + * @return A collection of modular inputs. + */ + public ResourceCollection<ModularInputKind> getModularInputKinds() { + return getModularInputKinds(null); + } + + /** + * Returns a collection of modular inputs. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of modular inputs. + */ + public ResourceCollection<ModularInputKind> getModularInputKinds(Args args) { + return new ResourceCollection<>( + this, "data/modular-inputs", ModularInputKind.class, args); + } + + /** + * Returns global TCP output properties. + * + * @return Global TCP output properties. + */ + public OutputDefault getOutputDefault() { + return new OutputDefault(this); + } + + /** + * Returns a collection of output group configurations. + * + * @return A collection of output group configurations. + */ + public EntityCollection<OutputGroup> getOutputGroups() { + return getOutputGroups(null); + } + + /** + * Returns a collection of output group configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of output group configurations. + */ + public EntityCollection<OutputGroup> getOutputGroups(Args args) { + return new EntityCollection<>( + this, "data/outputs/tcp/group", OutputGroup.class, args); + } + + /** + * Returns a collection of data-forwarding configurations. + * + * @return A collection of data-forwarding configurations. + */ + public EntityCollection<OutputServer> getOutputServers() { + return getOutputServers(null); + } + + /** + * Returns a collection of data-forwarding configurations. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of data-forwarding configurations. 
+ */ + public EntityCollection<OutputServer> getOutputServers(Args args) { + return new EntityCollection<>( + this, "data/outputs/tcp/server", OutputServer.class, args); + } + + /** + * Returns a collection of configurations for forwarding data in standard + * syslog format. + * + * @return A collection of syslog forwarders. + */ + public EntityCollection<OutputSyslog> getOutputSyslogs() { + return getOutputSyslogs(null); + } + + /** + * Returns a collection of configurations for forwarding data in standard + * syslog format. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of syslog forwarders. + */ + public EntityCollection<OutputSyslog> getOutputSyslogs(Args args) { + return new EntityCollection<>( + this, "data/outputs/tcp/syslog", OutputSyslog.class, args); + } + + /** + * Returns the current password that was used to authenticate the session. + * + * @return The current password. + */ + public String getPassword() { + return this.password; + } + + /** + * Returns a collection of passwords. This collection is used for managing + * secure credentials. + * + * @return A collection of passwords. + */ + public PasswordCollection getPasswords() { + return getPasswords(null); + } + + /** + * Returns a collection of passwords. This collection is used for managing + * secure credentials. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of passwords. + */ + public PasswordCollection getPasswords(Args args) { + return new PasswordCollection(this, args); + } + + /** + * Returns the receiver object for the Splunk service. + * + * @return A Splunk receiver object. + */ + public Receiver getReceiver() { + return new Receiver(this); + } + + /** + * Returns a collection of Splunk user roles. + * + * @return A collection of user roles. 
+ */ + public EntityCollection<Role> getRoles() { + return getRoles(null); + } + + /** + * Returns a collection of Splunk user roles. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of user roles. + */ + public EntityCollection<Role> getRoles(Args args) { + return new EntityCollection<>( + this, "authorization/roles", Role.class, args); + } + + /** + * Returns a collection of saved searches. + * + * @return A collection of saved searches. + */ + public SavedSearchCollection getSavedSearches() { + return getSavedSearches((SavedSearchCollectionArgs)null); + } + + /** + * Returns a collection of saved searches. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link SavedSearchCollectionArgs}. + * @return A collection of saved searches. + */ + // NOTE: This overload exists primarily to provide better documentation + // for the "args" parameter. + public SavedSearchCollection getSavedSearches(SavedSearchCollectionArgs args) { + return getSavedSearches((Args)args); + } + + /** + * Returns a collection of saved searches. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of saved searches. + */ + public SavedSearchCollection getSavedSearches(Args args) { + return new SavedSearchCollection(this, args); + } + + /** + * Returns a Saved Search by the provided title key. + * + * @param title The title for a job. + * @return A SavedSearch. + */ + public SavedSearch getSavedSearch(String title) { + return new SavedSearch(this, JobCollection.REST_PATH + "/" + title); + } + + /** + * Returns service configuration information for an instance of Splunk. + * + * @return Service configuration information. 
+ */ + public Settings getSettings() { + return new Settings(this); + } + + /** + * Returns the current session token. Session tokens can be shared across + * multiple {@code Service} instances. + * + * @return The session token. + */ + public String getToken() { + return this.token; + } + + /** + * Returns a collection of in-progress oneshot uploads. + * + * @return A collection of in-progress oneshot uploads + */ + public EntityCollection<Upload> getUploads() { + return getUploads(null); + } + + /** + * Returns a collection of in-progress oneshot uploads. + * + * @param namespace This collection's namespace; there are no other + * optional arguments for this endpoint. + * @return A collection of in-progress oneshot uploads + */ + public EntityCollection<Upload> getUploads(Args namespace) { + return new EntityCollection<>( + this, "data/inputs/oneshot", Upload.class, namespace); + } + + /** + * Returns the Splunk account username that was used to authenticate the + * current session. + * + * @return The current username. + */ + public String getUsername() { + return this.username; + } + + /** + * Returns a collection of Splunk users. + * + * @return A collection of users. + */ + public UserCollection getUsers() { + return getUsers(null); + } + + /** + * Returns a collection of Splunk users. + * + * @param args Collection arguments that specify the number of entities to + * return and how to sort them. See {@link CollectionArgs}. + * @return A collection of users. + */ + public UserCollection getUsers(Args args) { + return new UserCollection(this, args); + } + + /** + * Authenticates the {@code Service} instance with the username and password + * that were specified when the instance was created. + * + * Three cases: + * 1. If we have a cookie, but are missing username and/or password, login is noop + * 2. If we don't have a cookie, and are missing username and/or password we can't login + * 3. 
Otherwise login as usual + * + * @return The current {@code Service} instance. + */ + public Service login() { + if (this.cookieStore.hasSplunkAuthCookie() && (this.username == null || this.password == null)) { + return this; + } + else if (this.username == null || this.password == null) { + throw new IllegalStateException("Missing username or password."); + } + else { + return login(this.username, this.password); + } + } + + /** + * Authenticates the {@code Service} instance with a specified username and + * password. Note that these values override any previously-set values for + * username and password. + * + * @param username The Splunk account username. + * @param password The password for the username. + * @return The current {@code Service} instance. + */ + public Service login(String username, String password) { + this.username = username; + this.password = password; + + Args args = new Args(); + args.put("username", username); + args.put("password", password); + args.put("cookie", "1"); + ResponseMessage response = post("/services/auth/login", args); + String sessionKey = Xml.parse(response.getContent()) + .getElementsByTagName("sessionKey") + .item(0) + .getTextContent(); + this.token = "Splunk " + sessionKey; + ServiceInfo serviceInfoEntity = this.getInfo(); + this.version = serviceInfoEntity.getVersion(); + this.instanceType = serviceInfoEntity.getInstanceType(); + if (versionCompare("4.3") >= 0) + this.passwordEndPoint = "storage/passwords"; + + return this; + } + + /** + * Forgets the current session token. + * + * @return The current {@code Service} instance. + */ + public Service logout() { + this.token = null; + this.removeAllCookies(); + return this; + } + + /** + * Creates a oneshot synchronous search. + * + * @param query The search query. + * @return The search results. + */ + public InputStream oneshotSearch(String query) { + return oneshotSearch(query, null); + } + + /** + * Creates a oneshot synchronous search using search arguments. 
+ * + * @param query The search query. + * @param args The search arguments:<ul> + * <li>"output_mode": Specifies the output format of the results (XML, JSON, + * or CSV).</li> + * <li>"earliest_time": Specifies the earliest time in the time range to + * search. The time string can be a UTC time (with fractional seconds), a + * relative time specifier (to now), or a formatted time string.</li> + * <li>"latest_time": Specifies the latest time in the time range to search. + * The time string can be a UTC time (with fractional seconds), a relative + * time specifier (to now), or a formatted time string.</li> + * <li>"rf": Specifies one or more fields to add to the search.</li></ul> + * @return The search results. + */ + public InputStream oneshotSearch(String query, Map args) { + args = Args.create(args); + args.put("search", query); + args.put("exec_mode", "oneshot"); + + // By default, don't highlight search terms in the search output. + if (!args.containsKey("segmentation")) { + args.put("segmentation", "none"); + } + + ResponseMessage response = post(JobCollection.REST_PATH, args); + return response.getContent(); + } + + /** + * Creates a oneshot synchronous search using search arguments. + * + * @param query The search query. + * @param args The search arguments:<ul> + * <li>"output_mode": Specifies the output format of the results (XML, JSON, + * or CSV).</li> + * <li>"earliest_time": Specifies the earliest time in the time range to + * search. The time string can be a UTC time (with fractional seconds), a + * relative time specifier (to now), or a formatted time string.</li> + * <li>"latest_time": Specifies the latest time in the time range to search. + * The time string can be a UTC time (with fractional seconds), a relative + * time specifier (to now), or a formatted time string.</li> + * <li>"rf": Specifies one or more fields to add to the search.</li></ul> + * @return The search results. 
+ */ + public InputStream oneshotSearch(String query, Args args) { + return oneshotSearch(query, (Map<String, Object>)args); + } + + /** + * Opens a raw socket to this service. + * + * @param port The port to open. This port must already have been + * created as an allowable TCP input to the service. + * @return The socket. + * @throws java.io.IOException The IOException instance + */ + public Socket open(int port) throws IOException { + return new Socket(this.host, port); + } + + /** + * Parses a search query and returns a semantic map for the search in JSON + * format. + * + * @param query The search query. + * @return The parse response message. + */ + public ResponseMessage parse(String query) { + return parse(query, null); + } + + /** + * Parses a search query with additional arguments and returns a semantic + * map for the search in JSON format. + * + * @param query The search query. + * @param args Additional parse arguments. + * @return The parse response message. + */ + public ResponseMessage parse(String query, Map args) { + args = Args.create(args).add("q", query); + + if(enableV2SearchApi()) + return post("search/v2/parser", args); + else + return get("search/parser", args); + } + + /** + * Restarts the service. The service will be unavailable until it has + * successfully restarted. + * + * @return The restart response message. + */ + public ResponseMessage restart() { + return post("server/control/restart"); + } + + /** + * Creates an asynchronous search using the given query. Use this + * method for simple searches. + * + * @param query The search query. + * @return The search job. + */ + public Job search(String query) { + return search(query, null); + } + + /** + * Creates an asynchronous search job using the given query and + * search arguments. + * + * @param query The search query. + * @param args The search arguments. + * @return The search job. 
+ */ + public Job search(String query, Map<String, Object> args) { + args = Args.create(args); + + return this.getJobs().create(query, args); + } + + /** + * Issues an HTTP request against the service using a request path and + * message. + * This method overrides the base {@code HttpService.send} method + * and applies the Splunk authorization header, which is required for + * authenticated interactions with the Splunk service. + * + * @param path The request path. + * @param request The request message. + * @return The HTTP response. + */ + @Override public ResponseMessage send(String path, RequestMessage request) { + // cookieStore is a protected member of HttpService + if (token != null && !cookieStore.hasSplunkAuthCookie() ) { + request.getHeader().put("Authorization", token); + } + ResponseMessage responseMessage = super.send(fullpath(path), request); + if(responseMessage.getStatus() == 401 && (this.autologin && this.username!= null && this.password != null)){ + // Executing re-login to renew the session token. + this.login(this.username, this.password); + responseMessage = super.send(fullpath(path), request); + }else if(responseMessage.getStatus() >= 400){ + //if autologin is not set to true or username/password is not set, throw HTTPException + throw HttpException.create(responseMessage); + } + return responseMessage; + } + + /** + * Provides a session token for use by this {@code Service} instance. + * Session tokens can be shared across multiple {@code Service} instances. + * + * @param value The session token, which is a basic authorization header in + * the format "Basic <i>sessiontoken</i>", where <i>sessiontoken</i> is the + * Base64-encoded "username:password" string. + */ + public void setToken(String value) { + this.token = value; + } + + /** + * Provides a session token having <b>Splunk</b> added before token. + * This method is specifically used when user just have token value. 
+ * + * @param value The token value + */ + public void setSplunkToken(String value) { + this.token = value.contains("Splunk") ? value : "Splunk " + value; + } + + /** + * Provides a session token having <b>Bearer</b> added before token. + * This method is specifically used when user just have token value. + * + * @param value The token value + */ + public void setBearerToken(String value) { + this.token = value.contains("Splunk") || value.contains("Bearer") ? value : "Bearer " + value; + } + + + public boolean enableV2SearchApi(){ + if(null == this.instanceType){ + this.instanceType = this.getInfo().getInstanceType(); + } + if(this.instanceType.equalsIgnoreCase("cloud")) { + return versionIsAtLeast("9.0.2209"); + }else{ + return versionIsAtLeast("9.0.2"); + } + } + + /** + * Returns true if this Splunk instance's version is no earlier than + * the version specified in {@code version}. + * + * So when called on a Splunk 4.3.2 instance: + * * {@code versionIsAtLeast("4.3.2")} is {@code true}. + * * {@code versionIsAtLeast("4.1.0")} is {@code true}. + * * {@code versionIsAtLeast("5.0.0")} is {@code false}. + * + * @param version The version to compare this Splunk instance's version against. + * @return {@code true} if this Splunk instance's version is equal or + * greater than {@code version}; {@code false} otherwise. + */ + boolean versionIsAtLeast(String version) { + return versionCompare(version) >= 0; + } + + /** + * Returns true if this Splunk instance's version is earlier than + * the version specified in {@code version}. + * + * So when called on a Splunk 4.3.2 instance: + * * {@code versionIsEarlierThan("4.3.2")} is {@code false}. + * * {@code versionIsEarlierThan("4.1.0")} is {@code false}. + * * {@code versionIsEarlierThan("5.0.0")} is {@code true}. + * + * @param version The version to compare this Splunk instance's version against. + * @return {@code true} if this Splunk instance's version is less + * than {@code version}; {@code false} otherwise. 
+ */ + boolean versionIsEarlierThan(String version) { + return versionCompare(version) < 0; + } + + /** + * Returns a value indicating how the version of this Splunk instance + * compares to a given version: + * <ul> + * <li>{@code -1 if this version < the given version}</li> + * <li>{@code 0 if this version = the given version}</li> + * <li>{@code 1 if this version > the given version}</li> + * </ul> + * + * @param otherVersion The other version to compare to. + * @return -1 if this version is less than, 0 if this version is equal to, + * or 1 if this version is greater than the given version. + */ + public int versionCompare(String otherVersion) { + if(null == this.version){ + this.version = this.getInfo().getVersion(); + } + String[] components1 = this.version.split("\\."); + String[] components2 = otherVersion.split("\\."); + int numComponents = Math.max(components1.length, components2.length); + + for (int i = 0; i < numComponents; i++) { + int c1 = (i < components1.length) + ? Integer.parseInt(components1[i], 10) : 0; + int c2 = (i < components2.length) + ? Integer.parseInt(components2[i], 10) : 0; + if (c1 < c2) { + return -1; + } else if (c1 > c2) { + return 1; + } + } + return 0; + } +} diff --git a/splunk/src/main/java/com/splunk/Settings.java b/splunk/src/main/java/com/splunk/Settings.java index db925a22..979d3458 100644 --- a/splunk/src/main/java/com/splunk/Settings.java +++ b/splunk/src/main/java/com/splunk/Settings.java @@ -1,289 +1,289 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.HashMap; -import java.util.Map; - -/** - * The {@code Settings} class represents configuration information for an - * instance of Splunk. - */ -public class Settings extends Entity { - Settings(Service service) { - super(service, "server/settings"); - } - - /** - * Returns the fully-qualified path to the directory containing the - * default index for this instance of Splunk. - * - * @return The path to the Splunk index directory. - */ - public String getSplunkDB() { - return getString("SPLUNK_DB"); - } - - /** - * Returns the fully-qualified path to the Splunk installation directory. - * - * @return The path to the Splunk installation directory. - */ - public String getSplunkHome() { - return getString("SPLUNK_HOME"); - } - - /** - * Indicates whether SSL is enabled on the Splunk management port. - * - * @return {@code true} if SSL is enabled, {@code false} if not. - */ - public boolean getEnableSplunkWebSSL() { - return getBoolean("enableSplunkWebSSL"); - } - - /** - * Returns the default host name to use for data inputs. - * - * @return The host name. - */ - public String getHost() { - return getString("host", null); - } - - /** - * Returns the port on which Splunk Web is listening for this - * instance of Splunk. The port number defaults to 8000. - * - * @return The Splunk Web port number. - */ - public int getHttpPort() { - return getInteger("httpport"); - } - - /** - * Returns the IP address:port number for Splunkd. - * - * @return The IP address:port number. - */ - public int getMgmtPort() { - return getInteger("mgmtHostPort"); - } - - /** - * Returns the amount of free disk space that is required for Splunk - * to continue searching and indexing. - * - * @return The required amount of free disk space, in megabytes. 
- */ - public int getMinFreeSpace() { - return getInteger("minFreeSpace"); - } - - /** - * Returns the string that is prepended to the Splunk symmetric key to - * generate the final key that used to sign all traffic between master and - * slave licensers. - * - * @return Licenser symmetric key. - */ - public String getPass4SymmKey() { - return getString("pass4SymmKey"); - } - - /** - * Returns the name that is used to identify this Splunk instance for - * features such as distributed search. - * - * @return The name used to identify the Splunk instance. - */ - public String getServerName() { - return getString("serverName"); - } - - /** - * Returns the amount of time before a user session times out. - * - * @return The session time-out. - */ - public String getSessionTimeout() { - return getString("sessionTimeout"); - } - - /** - * Indicates whether the instance is configured to start Splunk Web. - * - * @return {@code true} if the instance is configured to start Splunk Web, - * {@code false} if Splunk Web is disabled. - */ - public boolean getStartWebServer() { - return getBoolean("startwebserver"); - } - - /** - * Returns the IP address of the authenticating proxy. - * - * @return The IP address of the authenticating proxy. - */ - public String getTrustedIP() { - return getString("trustedIP", null); - } - - /** - * Sets the fully-qualified local path to the default index. - * The default value is {@code $SPLUNK_HOME/var/lib/splunk/defaultdb/db/}. - * - * @param path The local path to the default index. - */ - public void setSplunkDBPath(String path) { - setCacheValue("SPLUNK_DB", path); - } - - /** - * Sets whether Splunk Web uses HTTP or HTTPS. - * - * @param useHttps {@code true} to use SSL and HTTPS, {@code false} to use - * HTTP. - */ - public void setEnableSplunkWebSSL(boolean useHttps) { - setCacheValue("enableSplunkWebSSL", useHttps); - } - - /** - * Sets the default host name to use for data inputs that do not override - * this setting. 
- * - * @param host The default host name. - */ - public void setHost(String host) { - setCacheValue("host", host); - } - - /** - * Sets the Splunk Web listening port. If Splunk uses SSL and HTTPS, this - * value should be set to the HTTPS port number. - * <p> - * <b>Note:</b> The port must be present for Splunk Web to start. If this - * value is omitted or set to 0, the server will not start an HTTP listener. - * @see #getEnableSplunkWebSSL - * - * @param port The Splunk Web listening port. - */ - public void setHttpPort(int port) { - setCacheValue("httpport", port); - } - - /** - * Sets the management port for splunkd. - * The default value is {@code 8089}. - * - * @param port The port for the management interface. - */ - public void setMgmtPort(int port) { - setCacheValue("mgmtHostPort", port); - } - - /** - * Sets the amount of free disk space that must exist for splunkd to - * continue operating. - * <p> - * Before attempting to run a search, Splunk requires this amount of - * free space on the file system where the dispatch directory is stored - * ({@code $SPLUNK_HOME/var/run/splunk/dispatch}). - * - * @param minFreeSpace The minimum free space, in megabytes. - */ - public void setMinimumFreeSpace(int minFreeSpace) { - setCacheValue("minFreeSpace", minFreeSpace); - } - - /** - * Sets the password string that is prepended to the Splunk symmetric key - * to generate the final key, which is used to sign all traffic between - * master/slave licensers. - * - * @param pass4SymmKey The prepended password string. - */ - public void setPasswordSymmKey(String pass4SymmKey) { - setCacheValue("pass4SymmKey", pass4SymmKey); - } - - /** - * Sets the name that is used to identify this Splunk instance for features - * such as distributed search. The default value is - * {@code <hostname>-<user running splunk>}. - * - * @param serverName The server name. 
- */ - public void setServerName(String serverName) { - setCacheValue("serverName", serverName); - } - - /** - * Sets the session timeout. - * The valid format is <i>number</i> followed by a time unit ("s", "h", - * or "d"). - * - * @param sessionTimeout The session timeout value. - */ - public void setSessionTimeout(String sessionTimeout) { - setCacheValue("sessionTimeout", sessionTimeout); - } - - /** - * Sets whether to start Splunk Web. - * - * @param startwebserver {@code true} to start Splunk Web, {@code false} if - * not. - */ - public void setStartWebServer(boolean startwebserver) { - setCacheValue("startwebserver", startwebserver); - } - - /** - * Sets the IP address of the authenticating proxy. Set this value to a - * valid IP address to enable SSO. - * - * This attribute is disabled by default. The normal value is "127.0.0.1". - * - * @param trustedIP The authenticating proxy's IP address. - */ - public void setTrustedIP(String trustedIP) { - setCacheValue("trustedIP", trustedIP); - } - - /** - * {@inheritDoc} - */ - @Override public void update(Map<String, Object> args) { - // Merge cached setters and live args together before updating. - HashMap<String, Object> mergedArgs = new HashMap<>(); - mergedArgs.putAll(toUpdate); - mergedArgs.putAll(args); - service.post(path + "/settings", mergedArgs); - toUpdate.clear(); - invalidate(); - } - - /** - * {@inheritDoc} - */ - @Override public void update() { - service.post(path + "/settings", toUpdate); - invalidate(); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.HashMap; +import java.util.Map; + +/** + * The {@code Settings} class represents configuration information for an + * instance of Splunk. + */ +public class Settings extends Entity { + Settings(Service service) { + super(service, "server/settings"); + } + + /** + * Returns the fully-qualified path to the directory containing the + * default index for this instance of Splunk. + * + * @return The path to the Splunk index directory. + */ + public String getSplunkDB() { + return getString("SPLUNK_DB"); + } + + /** + * Returns the fully-qualified path to the Splunk installation directory. + * + * @return The path to the Splunk installation directory. + */ + public String getSplunkHome() { + return getString("SPLUNK_HOME"); + } + + /** + * Indicates whether SSL is enabled on the Splunk management port. + * + * @return {@code true} if SSL is enabled, {@code false} if not. + */ + public boolean getEnableSplunkWebSSL() { + return getBoolean("enableSplunkWebSSL"); + } + + /** + * Returns the default host name to use for data inputs. + * + * @return The host name. + */ + public String getHost() { + return getString("host", null); + } + + /** + * Returns the port on which Splunk Web is listening for this + * instance of Splunk. The port number defaults to 8000. + * + * @return The Splunk Web port number. + */ + public int getHttpPort() { + return getInteger("httpport"); + } + + /** + * Returns the IP address:port number for Splunkd. + * + * @return The IP address:port number. + */ + public int getMgmtPort() { + return getInteger("mgmtHostPort"); + } + + /** + * Returns the amount of free disk space that is required for Splunk + * to continue searching and indexing. + * + * @return The required amount of free disk space, in megabytes. 
+ */ + public int getMinFreeSpace() { + return getInteger("minFreeSpace"); + } + + /** + * Returns the string that is prepended to the Splunk symmetric key to + * generate the final key that used to sign all traffic between master and + * slave licensers. + * + * @return Licenser symmetric key. + */ + public String getPass4SymmKey() { + return getString("pass4SymmKey"); + } + + /** + * Returns the name that is used to identify this Splunk instance for + * features such as distributed search. + * + * @return The name used to identify the Splunk instance. + */ + public String getServerName() { + return getString("serverName"); + } + + /** + * Returns the amount of time before a user session times out. + * + * @return The session time-out. + */ + public String getSessionTimeout() { + return getString("sessionTimeout"); + } + + /** + * Indicates whether the instance is configured to start Splunk Web. + * + * @return {@code true} if the instance is configured to start Splunk Web, + * {@code false} if Splunk Web is disabled. + */ + public boolean getStartWebServer() { + return getBoolean("startwebserver"); + } + + /** + * Returns the IP address of the authenticating proxy. + * + * @return The IP address of the authenticating proxy. + */ + public String getTrustedIP() { + return getString("trustedIP", null); + } + + /** + * Sets the fully-qualified local path to the default index. + * The default value is {@code $SPLUNK_HOME/var/lib/splunk/defaultdb/db/}. + * + * @param path The local path to the default index. + */ + public void setSplunkDBPath(String path) { + setCacheValue("SPLUNK_DB", path); + } + + /** + * Sets whether Splunk Web uses HTTP or HTTPS. + * + * @param useHttps {@code true} to use SSL and HTTPS, {@code false} to use + * HTTP. + */ + public void setEnableSplunkWebSSL(boolean useHttps) { + setCacheValue("enableSplunkWebSSL", useHttps); + } + + /** + * Sets the default host name to use for data inputs that do not override + * this setting. 
+ * + * @param host The default host name. + */ + public void setHost(String host) { + setCacheValue("host", host); + } + + /** + * Sets the Splunk Web listening port. If Splunk uses SSL and HTTPS, this + * value should be set to the HTTPS port number. + * <p> + * <b>Note:</b> The port must be present for Splunk Web to start. If this + * value is omitted or set to 0, the server will not start an HTTP listener. + * @see #getEnableSplunkWebSSL + * + * @param port The Splunk Web listening port. + */ + public void setHttpPort(int port) { + setCacheValue("httpport", port); + } + + /** + * Sets the management port for splunkd. + * The default value is {@code 8089}. + * + * @param port The port for the management interface. + */ + public void setMgmtPort(int port) { + setCacheValue("mgmtHostPort", port); + } + + /** + * Sets the amount of free disk space that must exist for splunkd to + * continue operating. + * <p> + * Before attempting to run a search, Splunk requires this amount of + * free space on the file system where the dispatch directory is stored + * ({@code $SPLUNK_HOME/var/run/splunk/dispatch}). + * + * @param minFreeSpace The minimum free space, in megabytes. + */ + public void setMinimumFreeSpace(int minFreeSpace) { + setCacheValue("minFreeSpace", minFreeSpace); + } + + /** + * Sets the password string that is prepended to the Splunk symmetric key + * to generate the final key, which is used to sign all traffic between + * master/slave licensers. + * + * @param pass4SymmKey The prepended password string. + */ + public void setPasswordSymmKey(String pass4SymmKey) { + setCacheValue("pass4SymmKey", pass4SymmKey); + } + + /** + * Sets the name that is used to identify this Splunk instance for features + * such as distributed search. The default value is + * {@code <hostname>-<user running splunk>}. + * + * @param serverName The server name. 
+ */ + public void setServerName(String serverName) { + setCacheValue("serverName", serverName); + } + + /** + * Sets the session timeout. + * The valid format is <i>number</i> followed by a time unit ("s", "h", + * or "d"). + * + * @param sessionTimeout The session timeout value. + */ + public void setSessionTimeout(String sessionTimeout) { + setCacheValue("sessionTimeout", sessionTimeout); + } + + /** + * Sets whether to start Splunk Web. + * + * @param startwebserver {@code true} to start Splunk Web, {@code false} if + * not. + */ + public void setStartWebServer(boolean startwebserver) { + setCacheValue("startwebserver", startwebserver); + } + + /** + * Sets the IP address of the authenticating proxy. Set this value to a + * valid IP address to enable SSO. + * + * This attribute is disabled by default. The normal value is "127.0.0.1". + * + * @param trustedIP The authenticating proxy's IP address. + */ + public void setTrustedIP(String trustedIP) { + setCacheValue("trustedIP", trustedIP); + } + + /** + * {@inheritDoc} + */ + @Override public void update(Map<String, Object> args) { + // Merge cached setters and live args together before updating. + HashMap<String, Object> mergedArgs = new HashMap<>(); + mergedArgs.putAll(toUpdate); + mergedArgs.putAll(args); + service.post(path + "/settings", mergedArgs); + toUpdate.clear(); + invalidate(); + } + + /** + * {@inheritDoc} + */ + @Override public void update() { + service.post(path + "/settings", toUpdate); + invalidate(); + } +} diff --git a/splunk/src/main/java/com/splunk/SimpleCookieStore.java b/splunk/src/main/java/com/splunk/SimpleCookieStore.java index 13e6f2ea..a4956f98 100644 --- a/splunk/src/main/java/com/splunk/SimpleCookieStore.java +++ b/splunk/src/main/java/com/splunk/SimpleCookieStore.java @@ -1,92 +1,92 @@ - -/* - * Copyright 2015 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.util.List; -import java.net.HttpCookie; -import java.util.Map; -import java.util.HashMap; - -/** - * The {@code SimpleCookieStore} class stores cookies for authentication. - */ -class SimpleCookieStore { - - public static final String SPLUNK_AUTH_COOKIE = "splunkd_"; - - private Map<String, String> cookieJar = new HashMap<>(); - /** - * Adds cookies from a "Set-Cookie" header to the cookie store. - * - * @param setCookieHeader The result from a getRequestHeader("Set-Cookie") call - */ - public void add(String setCookieHeader) { - if (setCookieHeader != null) { - List<HttpCookie> cookies = HttpCookie.parse(setCookieHeader); - for (HttpCookie cookie : cookies) { - cookieJar.put(cookie.getName(), cookie.getValue()); - } - } - } - - /** - * Returns a string to be set as a "Cookie" header - * - * @return Cookie String in the format "Key=Value; Key=Value; etc" - */ - public String getCookies() { - StringBuilder cookieStringBuilder = new StringBuilder(); - - for (Map.Entry<String, String> cookie : cookieJar.entrySet()) { - cookieStringBuilder.append(cookie.getKey()); - cookieStringBuilder.append("="); - cookieStringBuilder.append(cookie.getValue()); - cookieStringBuilder.append("; "); - } - return cookieStringBuilder.toString(); - } - - /** - * Returns true if the cookie store is empty, false otherwise - * - * @return Boolean for whether or not the cookie store is empty - */ - public Boolean isEmpty() { - return cookieJar.isEmpty(); - } - - public boolean hasSplunkAuthCookie(){ - if(cookieJar.isEmpty()){ - return 
false; - } - for(String cookie : cookieJar.keySet()){ - if(cookie.startsWith(SPLUNK_AUTH_COOKIE)){ - return true; - } - } - return false; - } - - /** - * Removes all cookies from SimpleCookieStore - */ - public void removeAll() { - cookieJar.clear(); - } - -} + +/* + * Copyright 2015 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.util.List; +import java.net.HttpCookie; +import java.util.Map; +import java.util.HashMap; + +/** + * The {@code SimpleCookieStore} class stores cookies for authentication. + */ +class SimpleCookieStore { + + public static final String SPLUNK_AUTH_COOKIE = "splunkd_"; + + private Map<String, String> cookieJar = new HashMap<>(); + /** + * Adds cookies from a "Set-Cookie" header to the cookie store. 
+ * + * @param setCookieHeader The result from a getRequestHeader("Set-Cookie") call + */ + public void add(String setCookieHeader) { + if (setCookieHeader != null) { + List<HttpCookie> cookies = HttpCookie.parse(setCookieHeader); + for (HttpCookie cookie : cookies) { + cookieJar.put(cookie.getName(), cookie.getValue()); + } + } + } + + /** + * Returns a string to be set as a "Cookie" header + * + * @return Cookie String in the format "Key=Value; Key=Value; etc" + */ + public String getCookies() { + StringBuilder cookieStringBuilder = new StringBuilder(); + + for (Map.Entry<String, String> cookie : cookieJar.entrySet()) { + cookieStringBuilder.append(cookie.getKey()); + cookieStringBuilder.append("="); + cookieStringBuilder.append(cookie.getValue()); + cookieStringBuilder.append("; "); + } + return cookieStringBuilder.toString(); + } + + /** + * Returns true if the cookie store is empty, false otherwise + * + * @return Boolean for whether or not the cookie store is empty + */ + public Boolean isEmpty() { + return cookieJar.isEmpty(); + } + + public boolean hasSplunkAuthCookie(){ + if(cookieJar.isEmpty()){ + return false; + } + for(String cookie : cookieJar.keySet()){ + if(cookie.startsWith(SPLUNK_AUTH_COOKIE)){ + return true; + } + } + return false; + } + + /** + * Removes all cookies from SimpleCookieStore + */ + public void removeAll() { + cookieJar.clear(); + } + +} diff --git a/splunk/src/main/java/com/splunk/StreamIterableBase.java b/splunk/src/main/java/com/splunk/StreamIterableBase.java index 8c423d3d..4bcb0617 100644 --- a/splunk/src/main/java/com/splunk/StreamIterableBase.java +++ b/splunk/src/main/java/com/splunk/StreamIterableBase.java @@ -1,94 +1,94 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.util.Iterator; -import java.util.NoSuchElementException; - -/** - * Helper class for iterator over readers that only support a get operation - * with null return indicating the end. - * @param <T> Type of elements. - */ -abstract class StreamIterableBase<T> implements Iterable<T> { - private T cachedElement; - private boolean nextElementCached; - - public Iterator<T> iterator() { - - return new Iterator<T>() { - - public boolean hasNext() { - cacheNextElement(); - return cachedElement != null; - } - - public T next() { - cacheNextElement(); - // Once reaching the end, don't advance any more. - // Otherwise underlying reader may throw - // which can be confusing. - if (cachedElement == null) { - throw new NoSuchElementException(); - } - else { - nextElementCached = false; - } - return cachedElement; - } - - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - /** - * Get the next element. - * @return null if the end is reached. - * @throws IOException The IOException instance - */ - abstract T getNextElement() throws IOException; - - /** - * Interrupt the iteration by setting the iterator to - * either the initial state or the end state. - * @param hasMoreResults Whether or not there are more results. - */ - void resetIteration(boolean hasMoreResults) { - // Throw away any not-null cached element. 
- cachedElement = null; - // If there's no more results, i.e., the end is reached, - // set nextElementCached to true so that - // the iterator will not try to get the next element. - // Otherwise, if getNextElement is called by the iterator, - // the underlying reader may throw which can be confusing. - nextElementCached = !hasMoreResults; - } - - private void cacheNextElement() { - if (!nextElementCached) { - try { - cachedElement = getNextElement(); - } catch (IOException e) { - throw new RuntimeException(e); - } - nextElementCached = true; - } - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.IOException; +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + * Helper class for iterator over readers that only support a get operation + * with null return indicating the end. + * @param <T> Type of elements. + */ +abstract class StreamIterableBase<T> implements Iterable<T> { + private T cachedElement; + private boolean nextElementCached; + + public Iterator<T> iterator() { + + return new Iterator<T>() { + + public boolean hasNext() { + cacheNextElement(); + return cachedElement != null; + } + + public T next() { + cacheNextElement(); + // Once reaching the end, don't advance any more. + // Otherwise underlying reader may throw + // which can be confusing. 
+ if (cachedElement == null) { + throw new NoSuchElementException(); + } + else { + nextElementCached = false; + } + return cachedElement; + } + + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + /** + * Get the next element. + * @return null if the end is reached. + * @throws IOException The IOException instance + */ + abstract T getNextElement() throws IOException; + + /** + * Interrupt the iteration by setting the iterator to + * either the initial state or the end state. + * @param hasMoreResults Whether or not there are more results. + */ + void resetIteration(boolean hasMoreResults) { + // Throw away any not-null cached element. + cachedElement = null; + // If there's no more results, i.e., the end is reached, + // set nextElementCached to true so that + // the iterator will not try to get the next element. + // Otherwise, if getNextElement is called by the iterator, + // the underlying reader may throw which can be confusing. + nextElementCached = !hasMoreResults; + } + + private void cacheNextElement() { + if (!nextElementCached) { + try { + cachedElement = getNextElement(); + } catch (IOException e) { + throw new RuntimeException(e); + } + nextElementCached = true; + } + } +} diff --git a/splunk/src/main/java/com/splunk/TcpInput.java b/splunk/src/main/java/com/splunk/TcpInput.java index a7905e0a..741a52da 100644 --- a/splunk/src/main/java/com/splunk/TcpInput.java +++ b/splunk/src/main/java/com/splunk/TcpInput.java @@ -1,312 +1,312 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.Socket; - -/** - * The {@code TcpInput} class represents a raw TCP data input. This differs from - * a <i>cooked</i> TCP input in that this TCP input is in raw form, and is not - * processed (or "cooked"). - */ -public class TcpInput extends PortInput { - - /** - * Class constructor. - * - * @param service The connected {@code Service} instance. - * @param path The raw TCP input endpoint. - */ - TcpInput(Service service, String path) { - super(service, path); - } - - /** - * Returns a socket attached to this raw TCP input. - * @return Socket instance - * @throws IOException The IOException instance - */ - public Socket attach() throws IOException { - return new Socket(this.service.getHost(), this.getPort()); - } - - /** - * Submits events to this raw TCP input, reusing the connection. - * - * This method passes an output stream connected to the index to the - * {@code run} method of the {@code ReceiverBehavior} object, then handles - * setting up and tearing down the socket. - * For an example of how to use this method, see - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to - * get data into Splunk</a> on - * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" - * target="_blank">dev.splunk.com</a>. - * @param behavior The ReceiverBehavior instance - * @throws IOException The IOException instance - */ - public void attachWith(ReceiverBehavior behavior) throws IOException { - try (Socket socket = attach(); - OutputStream output = socket.getOutputStream();) { - behavior.run(output); - output.flush(); - } - } - - /** - * Returns an object that contains the inbound raw TCP connections. - * - * @return The TCP connections object. 
- */ - public TcpConnections connections() { - return new TcpConnections(service, path + "/connections"); - } - - /** - * Returns the style of host connection. Valid values are: "ip", "dns", and - * "none". - * - * @return The style of host connection, or {@code null} if not specified. - */ - public String getConnectionHost() { - return getString("connection_host", null); - } - - /** - * Returns the group of this raw TCP input. - * - * @return The group. - */ - public String getGroup() { - return getString("group", null); - } - - /** - * Returns the source host of this raw TCP input where this indexer gets its - * data. - * - * @return The source host, or {@code null} if not specified. - */ - public String getHost() { - return getString("host", null); - } - - /** - * Returns the index name of this raw TCP input. - * - * @return The index name, or {@code null} if not specified. - */ - public String getIndex() { - return getString("index", null); - } - - /** - * Returns the input kind of this input. - * - * @return The input kind. - */ - public InputKind getKind() { - return InputKind.Tcp; - } - - /** - * Returns the queue for this raw TCP input. Valid values are: - * "parsingQueue" and "indexQueue". - * - * @return The queue, or {@code null} if not specified. - */ - public String getQueue() { - return getString("queue", null); - } - - /** - * @deprecated Returns the value of the {@code _rcvbuf} attribute for this - * TCP input. - * - * @return The {@code _rcvbuf} value. - */ - public int getRcvBuf() { - return getInteger("_rcvbuf"); - } - - /** - * Returns the incoming host restriction for this raw TCP input. When - * specified, this input only accepts data from the specified host. - * - * @return The incoming host restriction, or {@code null} if not specified. - */ - public String getRestrictToHost() { - return getString("restrictToHost", null); - } - - /** - * Returns the initial source key for this raw TCP input. Typically this - * value is the input file path. 
- * - * @return The source key, or {@code null} if not specified. - */ - public String getSource() { - return getString("source", null); - } - - /** - * Returns the source type for events from this raw TCP input. - * - * @return The source type, or {@code null} if not specified. - */ - public String getSourceType() { - return getString("sourcetype", null); - } - - /** - * Indicates whether this raw TCP input is using secure socket layer (SSL). - * - * @return {@code true} if this input is using SSL, {@code false} if not. - */ - public boolean getSSL() { - return getBoolean("SSL", false); - } - - /** - * Sets whether to use secure socket layer (SSL). - * - * @param SSL {@code true} to use SSL, {@code false} if not. - */ - public void setSSL(boolean SSL) { - setCacheValue("SSL", SSL); - } - - /** - * Sets the value for the <b>from-host</b> field for the remote server that - * is sending data. Valid values are: <ul> - * <li>"ip": Sets the host to the IP address of the remote server sending - * data.</li> - * <li>"dns": Sets the host to the reverse DNS entry for the IP address of - * the remote server sending data.</li> - * <li>"none": Leaves the host as specified in inputs.conf, which is - * typically the Splunk system host name.</li></ul> - * - * @param connection_host The connection host information. - */ - public void setConnectionHost(String connection_host) { - setCacheValue("connection_host", connection_host); - } - - /** - * Sets whether this input is enabled or disabled. - * <p> - * <b>Note:</b> Using this method requires you to restart Splunk before this - * setting takes effect. To avoid restarting Splunk, use the - * {@code Entity.disable} and {@code Entity.enable} methods instead, which - * take effect immediately. - * - * @param disabled {@code true} to disable this input, {@code false} to - * enable it. - */ - public void setDisabled(boolean disabled) { - setCacheValue("disabled", disabled); - } - - /** - * Sets the host from which the indexer gets data. 
- * - * @param host The host name. - */ - public void setHost(String host) { - setCacheValue("host", host); - } - - /** - * Sets the index in which to store all generated events. - * - * @param index The index name. - */ - public void setIndex(String index) { - setCacheValue("index", index); - } - - /** - * Submit a single event to this raw TCP input by opening the connection, - * submitting the event, and closing the connection. To submit multiple - * events, use {@code attachWith} to open a single connection. - * @see #attachWith - * - * @param eventBody A string that contains the event. - * @throws IOException The IOException instance - */ - public void submit(String eventBody) throws IOException { - try (Socket socket = attach(); - OutputStream output = socket.getOutputStream();) { - output.write(eventBody.getBytes("UTF-8")); - output.flush(); - } - } - - /** - * Sets how the input processor should deposit the events it reads. Valid - * values are:<ul> - * <li>"parsingQueue": Applies props.conf and other parsing rules to your - * data.</li> - * <li>"indexQueue": Sends your data directly into the index.</li></ul> - * - * @param queue The queue-processing type. - */ - public void setQueue(String queue) { - setCacheValue("queue", queue); - } - - /** - * Sets the timeout value for adding a Done key. - * - * If a connection over the input port specified by {@code name} remains - * idle after receiving data for this specified number of seconds, it adds - * a Done key, implying that the last event has been completely received. - * - * @param rawTcpDoneTimeout The timeout value, in seconds. - */ - public void setRawTcpDoneTimeout(int rawTcpDoneTimeout) { - setCacheValue("rawTcpDoneTimeout", rawTcpDoneTimeout); - } - - /** - * Sets the initial value for the source key for events from this - * input. The source key is used during parsing and indexing. The - * <b>source</b> field is used for searches. As a convenience, the source - * string is prepended with "source::". 
- * <p> - * <b>Note:</b> Overriding the source key is generally not recommended. - * Typically, the input layer provides a more accurate string to aid in - * problem analysis and investigation, accurately recording the file from - * which the data was retrieved. Consider the use of source types, tagging, - * and search wildcards before overriding this value. - * - * @param source The source. - */ - public void setSource(String source) { - setCacheValue("source", source); - } - - /** - * Sets the source type for events from this raw TCP input. - * - * @param sourcetype The source type. - */ - public void setSourceType(String sourcetype) { - setCacheValue("sourcetype", sourcetype); - } -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.Socket; + +/** + * The {@code TcpInput} class represents a raw TCP data input. This differs from + * a <i>cooked</i> TCP input in that this TCP input is in raw form, and is not + * processed (or "cooked"). + */ +public class TcpInput extends PortInput { + + /** + * Class constructor. + * + * @param service The connected {@code Service} instance. + * @param path The raw TCP input endpoint. + */ + TcpInput(Service service, String path) { + super(service, path); + } + + /** + * Returns a socket attached to this raw TCP input. 
+ * @return Socket instance + * @throws IOException The IOException instance + */ + public Socket attach() throws IOException { + return new Socket(this.service.getHost(), this.getPort()); + } + + /** + * Submits events to this raw TCP input, reusing the connection. + * + * This method passes an output stream connected to the index to the + * {@code run} method of the {@code ReceiverBehavior} object, then handles + * setting up and tearing down the socket. + * For an example of how to use this method, see + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" target="_blank">How to + * get data into Splunk</a> on + * <a href="http://dev.splunk.com/view/SP-CAAAEJ2" + * target="_blank">dev.splunk.com</a>. + * @param behavior The ReceiverBehavior instance + * @throws IOException The IOException instance + */ + public void attachWith(ReceiverBehavior behavior) throws IOException { + try (Socket socket = attach(); + OutputStream output = socket.getOutputStream();) { + behavior.run(output); + output.flush(); + } + } + + /** + * Returns an object that contains the inbound raw TCP connections. + * + * @return The TCP connections object. + */ + public TcpConnections connections() { + return new TcpConnections(service, path + "/connections"); + } + + /** + * Returns the style of host connection. Valid values are: "ip", "dns", and + * "none". + * + * @return The style of host connection, or {@code null} if not specified. + */ + public String getConnectionHost() { + return getString("connection_host", null); + } + + /** + * Returns the group of this raw TCP input. + * + * @return The group. + */ + public String getGroup() { + return getString("group", null); + } + + /** + * Returns the source host of this raw TCP input where this indexer gets its + * data. + * + * @return The source host, or {@code null} if not specified. + */ + public String getHost() { + return getString("host", null); + } + + /** + * Returns the index name of this raw TCP input. 
+ * + * @return The index name, or {@code null} if not specified. + */ + public String getIndex() { + return getString("index", null); + } + + /** + * Returns the input kind of this input. + * + * @return The input kind. + */ + public InputKind getKind() { + return InputKind.Tcp; + } + + /** + * Returns the queue for this raw TCP input. Valid values are: + * "parsingQueue" and "indexQueue". + * + * @return The queue, or {@code null} if not specified. + */ + public String getQueue() { + return getString("queue", null); + } + + /** + * @deprecated Returns the value of the {@code _rcvbuf} attribute for this + * TCP input. + * + * @return The {@code _rcvbuf} value. + */ + public int getRcvBuf() { + return getInteger("_rcvbuf"); + } + + /** + * Returns the incoming host restriction for this raw TCP input. When + * specified, this input only accepts data from the specified host. + * + * @return The incoming host restriction, or {@code null} if not specified. + */ + public String getRestrictToHost() { + return getString("restrictToHost", null); + } + + /** + * Returns the initial source key for this raw TCP input. Typically this + * value is the input file path. + * + * @return The source key, or {@code null} if not specified. + */ + public String getSource() { + return getString("source", null); + } + + /** + * Returns the source type for events from this raw TCP input. + * + * @return The source type, or {@code null} if not specified. + */ + public String getSourceType() { + return getString("sourcetype", null); + } + + /** + * Indicates whether this raw TCP input is using secure socket layer (SSL). + * + * @return {@code true} if this input is using SSL, {@code false} if not. + */ + public boolean getSSL() { + return getBoolean("SSL", false); + } + + /** + * Sets whether to use secure socket layer (SSL). + * + * @param SSL {@code true} to use SSL, {@code false} if not. 
+ */ + public void setSSL(boolean SSL) { + setCacheValue("SSL", SSL); + } + + /** + * Sets the value for the <b>from-host</b> field for the remote server that + * is sending data. Valid values are: <ul> + * <li>"ip": Sets the host to the IP address of the remote server sending + * data.</li> + * <li>"dns": Sets the host to the reverse DNS entry for the IP address of + * the remote server sending data.</li> + * <li>"none": Leaves the host as specified in inputs.conf, which is + * typically the Splunk system host name.</li></ul> + * + * @param connection_host The connection host information. + */ + public void setConnectionHost(String connection_host) { + setCacheValue("connection_host", connection_host); + } + + /** + * Sets whether this input is enabled or disabled. + * <p> + * <b>Note:</b> Using this method requires you to restart Splunk before this + * setting takes effect. To avoid restarting Splunk, use the + * {@code Entity.disable} and {@code Entity.enable} methods instead, which + * take effect immediately. + * + * @param disabled {@code true} to disable this input, {@code false} to + * enable it. + */ + public void setDisabled(boolean disabled) { + setCacheValue("disabled", disabled); + } + + /** + * Sets the host from which the indexer gets data. + * + * @param host The host name. + */ + public void setHost(String host) { + setCacheValue("host", host); + } + + /** + * Sets the index in which to store all generated events. + * + * @param index The index name. + */ + public void setIndex(String index) { + setCacheValue("index", index); + } + + /** + * Submit a single event to this raw TCP input by opening the connection, + * submitting the event, and closing the connection. To submit multiple + * events, use {@code attachWith} to open a single connection. + * @see #attachWith + * + * @param eventBody A string that contains the event. 
+ * @throws IOException The IOException instance + */ + public void submit(String eventBody) throws IOException { + try (Socket socket = attach(); + OutputStream output = socket.getOutputStream();) { + output.write(eventBody.getBytes("UTF-8")); + output.flush(); + } + } + + /** + * Sets how the input processor should deposit the events it reads. Valid + * values are:<ul> + * <li>"parsingQueue": Applies props.conf and other parsing rules to your + * data.</li> + * <li>"indexQueue": Sends your data directly into the index.</li></ul> + * + * @param queue The queue-processing type. + */ + public void setQueue(String queue) { + setCacheValue("queue", queue); + } + + /** + * Sets the timeout value for adding a Done key. + * + * If a connection over the input port specified by {@code name} remains + * idle after receiving data for this specified number of seconds, it adds + * a Done key, implying that the last event has been completely received. + * + * @param rawTcpDoneTimeout The timeout value, in seconds. + */ + public void setRawTcpDoneTimeout(int rawTcpDoneTimeout) { + setCacheValue("rawTcpDoneTimeout", rawTcpDoneTimeout); + } + + /** + * Sets the initial value for the source key for events from this + * input. The source key is used during parsing and indexing. The + * <b>source</b> field is used for searches. As a convenience, the source + * string is prepended with "source::". + * <p> + * <b>Note:</b> Overriding the source key is generally not recommended. + * Typically, the input layer provides a more accurate string to aid in + * problem analysis and investigation, accurately recording the file from + * which the data was retrieved. Consider the use of source types, tagging, + * and search wildcards before overriding this value. + * + * @param source The source. + */ + public void setSource(String source) { + setCacheValue("source", source); + } + + /** + * Sets the source type for events from this raw TCP input. + * + * @param sourcetype The source type. 
+ */ + public void setSourceType(String sourcetype) { + setCacheValue("sourcetype", sourcetype); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java b/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java index abfc256f..e18bdfae 100644 --- a/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java +++ b/splunk/src/main/java/com/splunk/modularinput/InputDefinition.java @@ -1,224 +1,224 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Document; -import org.w3c.dom.Node; -import org.xml.sax.SAXException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * The {@code InputDefinition} class encodes the XML defining inputs that Splunk passes to - * a modular input script as a Java object. - */ -public class InputDefinition { - // We use a map to hold all parameters such as server host, server URI, etc. instead of individual fields - // so that additions to the input definition contract in the future won't break this implementation. It also - // simplifies the parsing code below. 
- private Map<String,String> metadata; - - private Map<String, Map<String, Parameter>> inputs; - - private final String serverHostField = "server_host"; - private final String serverUriField = "server_uri"; - private final String checkpointDirField = "checkpoint_dir"; - private final String sessionKeyField = "session_key"; - - // Package private on purpose - InputDefinition() { - inputs = new HashMap<>(); - metadata = new HashMap<>(); - } - - /** - * Gets the name of the field to fetch. - * - * In future versions of Splunk, there may be additional fields on the {@code InputDefinition}. {@code getField} permits - * access to them in case you are constrained to an old version of the Splunk SDK for Java. - * - * @param fieldName The name of the field to fetch. - * @return The field. - */ - public String getField(String fieldName) { - return this.metadata.get(fieldName); - } - - /** - * Sets the name of the server on which this modular input is being run. - * @param serverHost String value - */ - public void setServerHost(String serverHost) { - this.metadata.put(serverHostField, serverHost); - } - - /** - * Gets the name of the server on which this modular input is being run. - * - * @return The name of the server on which this modular input is being run. - */ - public String getServerHost() { - return this.metadata.get(serverHostField); - } - - /** - * Sets the URI to reach the server on which this modular input is being run. - * - * @param serverUri The URI to reach the server on which this modular input is being run. - */ - public void setServerUri(String serverUri) { - this.metadata.put(serverUriField, serverUri); - } - - /** - * Gets the URI to the server on which this modular input is being run. - * - * @return The URI to the server on which this modular input is being run. - */ - public String getServerUri() { - return this.metadata.get(serverUriField); - } - - /** - * Sets the path to which to write checkpoint files. 
- * - * @param checkpointDir The path to which to write checkpoint files. - */ - public void setCheckpointDir(String checkpointDir) { - this.metadata.put(checkpointDirField, checkpointDir); - } - - /** - * Gets the path to which to write checkpoint files for restarting inputs. - * - * @return The path to which to write checkpoint files for restarting inputs. - */ - public String getCheckpointDir() { - return this.metadata.get(checkpointDirField); - } - - /** - * Sets a session key that can be used to access splunkd's REST API. - * - * @param sessionKey A session key that can be used to access splunkd's REST API. - */ - public void setSessionKey(String sessionKey) { - this.metadata.put(sessionKeyField, sessionKey); - } - - /** - * Sets a session providing access to splunkd's REST API on this host. - * - * @return A session key providing access to splunkd's REST API on this host. - */ - public String getSessionKey() { - return this.metadata.get(sessionKeyField); - } - - /** - * Adds an input to the set of inputs on this {@code InputDefinition}. - * - * @param name The name of this input (e.g., foobar://this-input-name). - * @param parameters A collection of {@code Parameter} objects giving the settings for this input. - */ - public void addInput(String name, Collection<Parameter> parameters) { - Map<String, Parameter> paramMap = new HashMap<>(); - - for (Parameter p : parameters) { - paramMap.put(p.getName(), p); - } - - this.inputs.put(name, paramMap); - } - - /** - * @return A map of all the inputs specified in this {@code InputDefinition}. - */ - public Map<String, Map<String, Parameter>> getInputs() { - return this.inputs; - } - - /** - * Parses a stream containing XML into an InputDefinition. - * - * @param stream The stream containing XML to parse. - * @return An {@code InputDefinition} object. - * @throws ParserConfigurationException If there are errors in setting up the parser (which indicates system - * configuration issues). 
- * @throws IOException If there is an error in reading from the stream. - * @throws SAXException When the XML is invalid. - * @throws MalformedDataException When the XML does specify a valid set of inputs. - */ - public static InputDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, - IOException, SAXException, MalformedDataException { - DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - documentBuilderFactory.setIgnoringElementContentWhitespace(true); - documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); - documentBuilderFactory.setExpandEntityReferences(false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); - DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); - Document doc = documentBuilder.parse(stream); - - InputDefinition definition = new InputDefinition(); - for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { - if (node.getNodeType() == Node.TEXT_NODE) { - continue; - } else if (node.getNodeName().equals("configuration")) { - for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { - if (child.getNodeType() == Node.TEXT_NODE) { - continue; - } - if (!child.getNodeName().equals("stanza")) { - throw new MalformedDataException("Expected stanza element; found " + child.getNodeName()); - } - String name = child.getAttributes().getNamedItem("name").getNodeValue(); - List<Parameter> parameter = Parameter.nodeToParameterList(child); - definition.addInput(name, parameter); - } - } else { - definition.metadata.put( - node.getNodeName(), - XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) - ); - } - } - - return definition; - } - - @Override - public boolean 
equals(Object other) { - if (!(other instanceof InputDefinition that)) { - return false; - } - return this.metadata.equals(that.metadata) && this.inputs.equals(that.inputs); - } - - @Override - public int hashCode() { - return this.metadata.hashCode() ^ (this.getInputs() == null ? 0 : this.getInputs().hashCode()); - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * The {@code InputDefinition} class encodes the XML defining inputs that Splunk passes to + * a modular input script as a Java object. + */ +public class InputDefinition { + // We use a map to hold all parameters such as server host, server URI, etc. instead of individual fields + // so that additions to the input definition contract in the future won't break this implementation. It also + // simplifies the parsing code below. 
+ private Map<String,String> metadata; + + private Map<String, Map<String, Parameter>> inputs; + + private final String serverHostField = "server_host"; + private final String serverUriField = "server_uri"; + private final String checkpointDirField = "checkpoint_dir"; + private final String sessionKeyField = "session_key"; + + // Package private on purpose + InputDefinition() { + inputs = new HashMap<>(); + metadata = new HashMap<>(); + } + + /** + * Gets the name of the field to fetch. + * + * In future versions of Splunk, there may be additional fields on the {@code InputDefinition}. {@code getField} permits + * access to them in case you are constrained to an old version of the Splunk SDK for Java. + * + * @param fieldName The name of the field to fetch. + * @return The field. + */ + public String getField(String fieldName) { + return this.metadata.get(fieldName); + } + + /** + * Sets the name of the server on which this modular input is being run. + * @param serverHost String value + */ + public void setServerHost(String serverHost) { + this.metadata.put(serverHostField, serverHost); + } + + /** + * Gets the name of the server on which this modular input is being run. + * + * @return The name of the server on which this modular input is being run. + */ + public String getServerHost() { + return this.metadata.get(serverHostField); + } + + /** + * Sets the URI to reach the server on which this modular input is being run. + * + * @param serverUri The URI to reach the server on which this modular input is being run. + */ + public void setServerUri(String serverUri) { + this.metadata.put(serverUriField, serverUri); + } + + /** + * Gets the URI to the server on which this modular input is being run. + * + * @return The URI to the server on which this modular input is being run. + */ + public String getServerUri() { + return this.metadata.get(serverUriField); + } + + /** + * Sets the path to which to write checkpoint files. 
+ * + * @param checkpointDir The path to which to write checkpoint files. + */ + public void setCheckpointDir(String checkpointDir) { + this.metadata.put(checkpointDirField, checkpointDir); + } + + /** + * Gets the path to which to write checkpoint files for restarting inputs. + * + * @return The path to which to write checkpoint files for restarting inputs. + */ + public String getCheckpointDir() { + return this.metadata.get(checkpointDirField); + } + + /** + * Sets a session key that can be used to access splunkd's REST API. + * + * @param sessionKey A session key that can be used to access splunkd's REST API. + */ + public void setSessionKey(String sessionKey) { + this.metadata.put(sessionKeyField, sessionKey); + } + + /** + * Sets a session providing access to splunkd's REST API on this host. + * + * @return A session key providing access to splunkd's REST API on this host. + */ + public String getSessionKey() { + return this.metadata.get(sessionKeyField); + } + + /** + * Adds an input to the set of inputs on this {@code InputDefinition}. + * + * @param name The name of this input (e.g., foobar://this-input-name). + * @param parameters A collection of {@code Parameter} objects giving the settings for this input. + */ + public void addInput(String name, Collection<Parameter> parameters) { + Map<String, Parameter> paramMap = new HashMap<>(); + + for (Parameter p : parameters) { + paramMap.put(p.getName(), p); + } + + this.inputs.put(name, paramMap); + } + + /** + * @return A map of all the inputs specified in this {@code InputDefinition}. + */ + public Map<String, Map<String, Parameter>> getInputs() { + return this.inputs; + } + + /** + * Parses a stream containing XML into an InputDefinition. + * + * @param stream The stream containing XML to parse. + * @return An {@code InputDefinition} object. + * @throws ParserConfigurationException If there are errors in setting up the parser (which indicates system + * configuration issues). 
+ * @throws IOException If there is an error in reading from the stream. + * @throws SAXException When the XML is invalid. + * @throws MalformedDataException When the XML does specify a valid set of inputs. + */ + public static InputDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, + IOException, SAXException, MalformedDataException { + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setIgnoringElementContentWhitespace(true); + documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + documentBuilderFactory.setExpandEntityReferences(false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + Document doc = documentBuilder.parse(stream); + + InputDefinition definition = new InputDefinition(); + for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { + if (node.getNodeType() == Node.TEXT_NODE) { + continue; + } else if (node.getNodeName().equals("configuration")) { + for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { + if (child.getNodeType() == Node.TEXT_NODE) { + continue; + } + if (!child.getNodeName().equals("stanza")) { + throw new MalformedDataException("Expected stanza element; found " + child.getNodeName()); + } + String name = child.getAttributes().getNamedItem("name").getNodeValue(); + List<Parameter> parameter = Parameter.nodeToParameterList(child); + definition.addInput(name, parameter); + } + } else { + definition.metadata.put( + node.getNodeName(), + XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) + ); + } + } + + return definition; + } + + @Override + public boolean 
equals(Object other) { + if (!(other instanceof InputDefinition that)) { + return false; + } + return this.metadata.equals(that.metadata) && this.inputs.equals(that.inputs); + } + + @Override + public int hashCode() { + return this.metadata.hashCode() ^ (this.getInputs() == null ? 0 : this.getInputs().hashCode()); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java b/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java index ceb7dd2f..6d86ff4e 100644 --- a/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java +++ b/splunk/src/main/java/com/splunk/modularinput/MultiValueParameter.java @@ -1,82 +1,82 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import java.util.ArrayList; -import java.util.List; - -/** - * The {@code MultiValueParameter} class represents a parameter containing multiple values that is passed as part of a definition - * of a modular input instance. {@code MultiValueParameter} objects correspond to XML fragments of the form: - * - * <pre> - * {@code - * <param_list name="multiValue"> - * <value>value1</value> - * <value>value2</value> - * </param_list> - * } - * </pre> - */ -public class MultiValueParameter extends Parameter { - private final String name; - private final List<String> values; - - // Note: package private constructor by design so parameters cannot be instantiated by the user. 
- MultiValueParameter(String name) { - this.name = name; - this.values = new ArrayList<>(); - } - - /** - * Gets the name of this parameter. - * - * @return The name of this parameter. - */ - public String getName() { - return this.name; - } - - /** - * Gets a list of all values of this parameter. - * - * @return A list of all values of this parameter. - */ - public List<String> getValues() { - return this.values; - } - - // Package private by design. - void appendValue(String value) { - this.values.add(value); - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof MultiValueParameter that)) { - return false; - } else { - return this.values.equals(that.values) && this.name.equals(that.name); - } - } - - @Override - public int hashCode() { - return (this.name == null ? 0 : this.name.hashCode()) ^ - (this.values == null ? 0 : this.values.hashCode()); - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import java.util.ArrayList; +import java.util.List; + +/** + * The {@code MultiValueParameter} class represents a parameter containing multiple values that is passed as part of a definition + * of a modular input instance. 
{@code MultiValueParameter} objects correspond to XML fragments of the form: + * + * <pre> + * {@code + * <param_list name="multiValue"> + * <value>value1</value> + * <value>value2</value> + * </param_list> + * } + * </pre> + */ +public class MultiValueParameter extends Parameter { + private final String name; + private final List<String> values; + + // Note: package private constructor by design so parameters cannot be instantiated by the user. + MultiValueParameter(String name) { + this.name = name; + this.values = new ArrayList<>(); + } + + /** + * Gets the name of this parameter. + * + * @return The name of this parameter. + */ + public String getName() { + return this.name; + } + + /** + * Gets a list of all values of this parameter. + * + * @return A list of all values of this parameter. + */ + public List<String> getValues() { + return this.values; + } + + // Package private by design. + void appendValue(String value) { + this.values.add(value); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof MultiValueParameter that)) { + return false; + } else { + return this.values.equals(that.values) && this.name.equals(that.name); + } + } + + @Override + public int hashCode() { + return (this.name == null ? 0 : this.name.hashCode()) ^ + (this.values == null ? 0 : this.values.hashCode()); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/Parameter.java b/splunk/src/main/java/com/splunk/modularinput/Parameter.java index 30b31cc3..1d711bca 100644 --- a/splunk/src/main/java/com/splunk/modularinput/Parameter.java +++ b/splunk/src/main/java/com/splunk/modularinput/Parameter.java @@ -1,98 +1,98 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Node; - -import java.util.ArrayList; -import java.util.List; - -/** - * The {@code Parameter} class is a base class for parameters of modular inputs. It has two subclasses: {@code SingleValueParameter} - * and {@code MultiValueParameter}. - * - * All parameters should be constructed with the static {@code nodeToParameterList} method, which takes an XML {@code org.w3c.dom.Node} - * object as its argument and returns a list of {@code Parameter} objects, single valued or multi valued as needed. - */ -public abstract class Parameter { - public abstract String getName(); - - // Package private to enforce using the nodeToParameterList function to create Parameter objects. - Parameter() { - super(); - } - - /** - * Generates a list of {@code Parameter} objects from an {@code org.w3c.dom.Node} object containing a set of parameters. The node - * may be any element, but is expected to contain elements param or param_list, as in - * - * <pre> - * {@code - * <stanza name="foobar://aaa"> - * <param name="param1">value1</param> - * <param name="param2">value2</param> - * <param name="disabled">0</param> - * <param name="index">default</param> - * <param_list name="multiValue"> - * <value>value1</value> - * <value>value2</value> - * </param_list> - * </stanza> - * } - * </pre> - * - * @param node An {@code org.w3c.dom.Node} object containing the parameter list as children. - * @return A list of Parameter objects extracted from the XML. 
- * @throws com.splunk.modularinput.MalformedDataException If the XML does not specify a valid parameter list. - */ - public static List<Parameter> nodeToParameterList(Node node) throws MalformedDataException { - List<Parameter> parameters = new ArrayList<>(); - - for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { - if (child.getNodeType() == Node.TEXT_NODE) { - continue; - } - if ("param".equals(child.getNodeName())) { - // This is a single value parameter - String name = child.getAttributes().getNamedItem("name").getNodeValue(); - String value = XmlUtil.textInNode(child, "Element param with name=\"" + name + - "\" did not contain text."); - parameters.add(new SingleValueParameter(name, value)); - } else if ("param_list".equals(child.getNodeName())) { - String name = child.getAttributes().getNamedItem("name").getNodeValue(); - MultiValueParameter parameter = new MultiValueParameter(name); - for (Node valueNode = child.getFirstChild(); valueNode != null; valueNode = valueNode.getNextSibling()) { - if (valueNode.getNodeType() == Node.TEXT_NODE) continue; - if (!"value".equals(valueNode.getNodeName())) { - throw new MalformedDataException("Expected a value element in parameter named " + - child.getNodeName() + "; found " + valueNode.getNodeName()); - } else { - parameter.appendValue(XmlUtil.textInNode( - valueNode, - "value element in parameter named " + child.getNodeName() + " did not contain text." - )); - } - } - parameters.add(parameter); - } else { - throw new MalformedDataException("Bad parameter element named " + child.getNodeName()); - } - } - - return parameters; - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Node; + +import java.util.ArrayList; +import java.util.List; + +/** + * The {@code Parameter} class is a base class for parameters of modular inputs. It has two subclasses: {@code SingleValueParameter} + * and {@code MultiValueParameter}. + * + * All parameters should be constructed with the static {@code nodeToParameterList} method, which takes an XML {@code org.w3c.dom.Node} + * object as its argument and returns a list of {@code Parameter} objects, single valued or multi valued as needed. + */ +public abstract class Parameter { + public abstract String getName(); + + // Package private to enforce using the nodeToParameterList function to create Parameter objects. + Parameter() { + super(); + } + + /** + * Generates a list of {@code Parameter} objects from an {@code org.w3c.dom.Node} object containing a set of parameters. The node + * may be any element, but is expected to contain elements param or param_list, as in + * + * <pre> + * {@code + * <stanza name="foobar://aaa"> + * <param name="param1">value1</param> + * <param name="param2">value2</param> + * <param name="disabled">0</param> + * <param name="index">default</param> + * <param_list name="multiValue"> + * <value>value1</value> + * <value>value2</value> + * </param_list> + * </stanza> + * } + * </pre> + * + * @param node An {@code org.w3c.dom.Node} object containing the parameter list as children. + * @return A list of Parameter objects extracted from the XML. 
+ * @throws com.splunk.modularinput.MalformedDataException If the XML does not specify a valid parameter list. + */ + public static List<Parameter> nodeToParameterList(Node node) throws MalformedDataException { + List<Parameter> parameters = new ArrayList<>(); + + for (Node child = node.getFirstChild(); child != null; child = child.getNextSibling()) { + if (child.getNodeType() == Node.TEXT_NODE) { + continue; + } + if ("param".equals(child.getNodeName())) { + // This is a single value parameter + String name = child.getAttributes().getNamedItem("name").getNodeValue(); + String value = XmlUtil.textInNode(child, "Element param with name=\"" + name + + "\" did not contain text."); + parameters.add(new SingleValueParameter(name, value)); + } else if ("param_list".equals(child.getNodeName())) { + String name = child.getAttributes().getNamedItem("name").getNodeValue(); + MultiValueParameter parameter = new MultiValueParameter(name); + for (Node valueNode = child.getFirstChild(); valueNode != null; valueNode = valueNode.getNextSibling()) { + if (valueNode.getNodeType() == Node.TEXT_NODE) continue; + if (!"value".equals(valueNode.getNodeName())) { + throw new MalformedDataException("Expected a value element in parameter named " + + child.getNodeName() + "; found " + valueNode.getNodeName()); + } else { + parameter.appendValue(XmlUtil.textInNode( + valueNode, + "value element in parameter named " + child.getNodeName() + " did not contain text." + )); + } + } + parameters.add(parameter); + } else { + throw new MalformedDataException("Bad parameter element named " + child.getNodeName()); + } + } + + return parameters; + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/Scheme.java b/splunk/src/main/java/com/splunk/modularinput/Scheme.java index 086d6534..dbc99ce6 100644 --- a/splunk/src/main/java/com/splunk/modularinput/Scheme.java +++ b/splunk/src/main/java/com/splunk/modularinput/Scheme.java @@ -1,244 +1,244 @@ -/* - * Copyright 2013 Splunk, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Document; -import org.w3c.dom.Element; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.util.ArrayList; -import java.util.List; - - -/** - * The {@code Scheme} class represents the metadata for a modular input kind. - * - * A {@code Scheme} specifies a title, description, several options of how Splunk should run modular inputs of this - * kind, and a set of arguments which define a particular modular input's properties. - * - * The primary use of {@code Scheme} is to abstract away the construction of XML to feed to Splunk. - */ -public class Scheme { - private static DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - - public enum StreamingMode { SIMPLE, XML }; - - // Name of this module input kind. <tt>title</tt> will be used as the URL scheme when - // specifying particular modular inputs. For example, if <tt>title</tt> is <tt>"abc"</tt>, - // a particular modular input of this kind would be referenced as <tt>abc://some_name</tt>. - protected String title; - - // Human readable description of this modular input kind. - protected String description = null; - - // Should this script be called by Splunk to validate the configuration of modular inputs of this kind? 
- // If false, then Splunk does some basic sanity checking. - protected boolean useExternalValidation = true; - - // Should all modular inputs of this kind share a single instance of this script? - protected boolean useSingleInstance = false; - - // Will events be streamed to Splunk from this modular input in simple text or in XML? XML is the default - // and should be preferred unless you have a really good reason to choose otherwise. - protected StreamingMode streamingMode = StreamingMode.XML; - - // A List of all the arguments that this modular input kind takes. - protected List<Argument> arguments; - - public Scheme(String title) { - this.title = title; - this.arguments = new ArrayList<>(); - } - - /** - * Gets the title of this modular input kind. - * - * @return The title of this modular input kind. - */ - public String getTitle() { - return title; - } - - /** - * Sets the title of this modular input kind. - * - * @param title The title of this modular input kind. - */ - public void setTitle(String title) { - this.title = title; - } - - /** - * Gets the human readable description of this modular input kind. - * - * @return The human readable description of this modular input kind. - */ - public String getDescription() { - return description; - } - - /** - * Sets the human readable description of this modular input kind. - * - * @param description The human readable description of this modular input kind. - */ - public void setDescription(String description) { - this.description = description; - } - - /** - * Returns whether Splunk should use the modular input kind script to validate the arguments - * of a particular modular input or use the validation predicates specified by the arguments. - * - * @return {@code true} if Splunk should use the modular input kind script to validate the arguments - * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. 
- */ - public boolean isUseExternalValidation() { - return useExternalValidation; - } - - /** - * Specifies whether Splunk should use the modular input kind script to validate the arguments - * of a particular modular input (true) or use the validation predicates specified by the arguments (false). - * - * @param useExternalValidation {@code true} if Splunk should use the modular input kind script to validate the arguments - * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. - */ - public void setUseExternalValidation(boolean useExternalValidation) { - this.useExternalValidation = useExternalValidation; - } - - /** - * Returns whether Splunk should run all modular inputs of this kind via one instance of the script - * or start an instance for each modular input. - * - * @return {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, - * {@code false} if it should start an instance for each modular input. - */ - public boolean isUseSingleInstance() { - return useSingleInstance; - } - - /** - * Specifies whether Splunk should run all modular inputs of this kind via one instance of the script - * or start an instance for each modular input. - * - * @param useSingleInstance {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, - * {@code false} if it should start an instance for each modular input. - */ - public void setUseSingleInstance(boolean useSingleInstance) { - this.useSingleInstance = useSingleInstance; - } - - /** - * Returns whether this modular input kind will send events to Splunk as XML (the default and preferred - * value) or plain text. - * - * @return The streaming mode. - */ - public StreamingMode getStreamingMode() { - return streamingMode; - } - - /** - * Specifies whether this modular input kind will send events to Splunk as XML (the default and preferred - * value) or plain text. 
- * - * @param streamingMode The streaming mode. - */ - public void setStreamingMode(StreamingMode streamingMode) { - this.streamingMode = streamingMode; - } - - /** - * Returns all the arguments to this modular input kind. - * - * @return A list of all the arguments to this modular input kind. - */ - public List<Argument> getArguments() { - return arguments; - } - - /** - * Replaces the current list of arguments with the specified one. - * - * @param arguments The list of arguments with which to replace the current - * list of arguments. - */ - public void setArguments(List<Argument> arguments) { - this.arguments = new ArrayList<>(arguments); - } - - /** - * Appends an argument to the arguments that this modular input kind takes. - * - * @param argument The argument to append to the arguments. - */ - public void addArgument(Argument argument) { - this.arguments.add(argument); - } - - /** - * Generates an XML encoding of this scheme to be passed to Splunk. - * - * @return An {@code org.w3c.dom.Document} object containing the XML of this scheme. - * @throws ParserConfigurationException If there was a problem configuring the XML libraries. 
- */ - Document toXml() throws ParserConfigurationException { - DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); - Document doc = documentBuilder.newDocument(); - - Element root = doc.createElement("scheme"); - doc.appendChild(root); - - Element title = doc.createElement("title"); - title.appendChild(doc.createTextNode(this.title)); - root.appendChild(title); - - if (this.description != null) { - Element description = doc.createElement("description"); - description.appendChild(doc.createTextNode(this.description)); - root.appendChild(description); - } - - Element useExternalValidation = doc.createElement("use_external_validation"); - useExternalValidation.appendChild(doc.createTextNode(Boolean.toString(this.useExternalValidation))); - root.appendChild(useExternalValidation); - - Element useSingleInstance = doc.createElement("use_single_instance"); - useSingleInstance.appendChild(doc.createTextNode(Boolean.toString(this.useSingleInstance))); - root.appendChild(useSingleInstance); - - Element streamingMode = doc.createElement("streaming_mode"); - streamingMode.appendChild(doc.createTextNode(this.streamingMode == StreamingMode.SIMPLE ? "simple" : "xml")); - root.appendChild(streamingMode); - - Element endpoint = doc.createElement("endpoint"); - root.appendChild(endpoint); - - Element args = doc.createElement("args"); - endpoint.appendChild(args); - - for (Argument arg : this.arguments) { - arg.addToDocument(doc, args); - } - - return doc; - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.util.ArrayList; +import java.util.List; + + +/** + * The {@code Scheme} class represents the metadata for a modular input kind. + * + * A {@code Scheme} specifies a title, description, several options of how Splunk should run modular inputs of this + * kind, and a set of arguments which define a particular modular input's properties. + * + * The primary use of {@code Scheme} is to abstract away the construction of XML to feed to Splunk. + */ +public class Scheme { + private static DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + + public enum StreamingMode { SIMPLE, XML }; + + // Name of this module input kind. <tt>title</tt> will be used as the URL scheme when + // specifying particular modular inputs. For example, if <tt>title</tt> is <tt>"abc"</tt>, + // a particular modular input of this kind would be referenced as <tt>abc://some_name</tt>. + protected String title; + + // Human readable description of this modular input kind. + protected String description = null; + + // Should this script be called by Splunk to validate the configuration of modular inputs of this kind? + // If false, then Splunk does some basic sanity checking. + protected boolean useExternalValidation = true; + + // Should all modular inputs of this kind share a single instance of this script? + protected boolean useSingleInstance = false; + + // Will events be streamed to Splunk from this modular input in simple text or in XML? XML is the default + // and should be preferred unless you have a really good reason to choose otherwise. 
+ protected StreamingMode streamingMode = StreamingMode.XML; + + // A List of all the arguments that this modular input kind takes. + protected List<Argument> arguments; + + public Scheme(String title) { + this.title = title; + this.arguments = new ArrayList<>(); + } + + /** + * Gets the title of this modular input kind. + * + * @return The title of this modular input kind. + */ + public String getTitle() { + return title; + } + + /** + * Sets the title of this modular input kind. + * + * @param title The title of this modular input kind. + */ + public void setTitle(String title) { + this.title = title; + } + + /** + * Gets the human readable description of this modular input kind. + * + * @return The human readable description of this modular input kind. + */ + public String getDescription() { + return description; + } + + /** + * Sets the human readable description of this modular input kind. + * + * @param description The human readable description of this modular input kind. + */ + public void setDescription(String description) { + this.description = description; + } + + /** + * Returns whether Splunk should use the modular input kind script to validate the arguments + * of a particular modular input or use the validation predicates specified by the arguments. + * + * @return {@code true} if Splunk should use the modular input kind script to validate the arguments + * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. + */ + public boolean isUseExternalValidation() { + return useExternalValidation; + } + + /** + * Specifies whether Splunk should use the modular input kind script to validate the arguments + * of a particular modular input (true) or use the validation predicates specified by the arguments (false). 
+ * + * @param useExternalValidation {@code true} if Splunk should use the modular input kind script to validate the arguments + * of a particular modular input, {@code false} if it should use the validation predicates specified by the arguments. + */ + public void setUseExternalValidation(boolean useExternalValidation) { + this.useExternalValidation = useExternalValidation; + } + + /** + * Returns whether Splunk should run all modular inputs of this kind via one instance of the script + * or start an instance for each modular input. + * + * @return {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, + * {@code false} if it should start an instance for each modular input. + */ + public boolean isUseSingleInstance() { + return useSingleInstance; + } + + /** + * Specifies whether Splunk should run all modular inputs of this kind via one instance of the script + * or start an instance for each modular input. + * + * @param useSingleInstance {@code true} if Splunk should run all modular inputs of this kind via one instance of the script, + * {@code false} if it should start an instance for each modular input. + */ + public void setUseSingleInstance(boolean useSingleInstance) { + this.useSingleInstance = useSingleInstance; + } + + /** + * Returns whether this modular input kind will send events to Splunk as XML (the default and preferred + * value) or plain text. + * + * @return The streaming mode. + */ + public StreamingMode getStreamingMode() { + return streamingMode; + } + + /** + * Specifies whether this modular input kind will send events to Splunk as XML (the default and preferred + * value) or plain text. + * + * @param streamingMode The streaming mode. + */ + public void setStreamingMode(StreamingMode streamingMode) { + this.streamingMode = streamingMode; + } + + /** + * Returns all the arguments to this modular input kind. + * + * @return A list of all the arguments to this modular input kind. 
+ */ + public List<Argument> getArguments() { + return arguments; + } + + /** + * Replaces the current list of arguments with the specified one. + * + * @param arguments The list of arguments with which to replace the current + * list of arguments. + */ + public void setArguments(List<Argument> arguments) { + this.arguments = new ArrayList<>(arguments); + } + + /** + * Appends an argument to the arguments that this modular input kind takes. + * + * @param argument The argument to append to the arguments. + */ + public void addArgument(Argument argument) { + this.arguments.add(argument); + } + + /** + * Generates an XML encoding of this scheme to be passed to Splunk. + * + * @return An {@code org.w3c.dom.Document} object containing the XML of this scheme. + * @throws ParserConfigurationException If there was a problem configuring the XML libraries. + */ + Document toXml() throws ParserConfigurationException { + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + Document doc = documentBuilder.newDocument(); + + Element root = doc.createElement("scheme"); + doc.appendChild(root); + + Element title = doc.createElement("title"); + title.appendChild(doc.createTextNode(this.title)); + root.appendChild(title); + + if (this.description != null) { + Element description = doc.createElement("description"); + description.appendChild(doc.createTextNode(this.description)); + root.appendChild(description); + } + + Element useExternalValidation = doc.createElement("use_external_validation"); + useExternalValidation.appendChild(doc.createTextNode(Boolean.toString(this.useExternalValidation))); + root.appendChild(useExternalValidation); + + Element useSingleInstance = doc.createElement("use_single_instance"); + useSingleInstance.appendChild(doc.createTextNode(Boolean.toString(this.useSingleInstance))); + root.appendChild(useSingleInstance); + + Element streamingMode = doc.createElement("streaming_mode"); + 
streamingMode.appendChild(doc.createTextNode(this.streamingMode == StreamingMode.SIMPLE ? "simple" : "xml")); + root.appendChild(streamingMode); + + Element endpoint = doc.createElement("endpoint"); + root.appendChild(endpoint); + + Element args = doc.createElement("args"); + endpoint.appendChild(args); + + for (Argument arg : this.arguments) { + arg.addToDocument(doc, args); + } + + return doc; + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java b/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java index 2dfd6e7a..6a33c831 100644 --- a/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java +++ b/splunk/src/main/java/com/splunk/modularinput/SingleValueParameter.java @@ -1,120 +1,120 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -/** - * The {@code SingleValueParameter} class represents a parameter as part of a modular input instance that - * contains only a single value. This corresponds to XML fragments of the form: - * - * <pre> - * {@code - * <param name="param1">value11</param> - * } - * </pre> - */ -public class SingleValueParameter extends Parameter { - private final String name; - private final String value; - - // Package private by design. - SingleValueParameter(String name, String value) { - this.name = name; - this.value = value; - } - - /** - * @return the name of this parameter. 
- */ - public String getName() { - return this.name; - } - - /** - * Returns the parameter as found (as a String), without trying to coerce it to another type. - * - * If your field is Boolean or numeric, use {@code getBoolean} or one of {@code getInt}, {@code getLong}, - * {@code getFloat}, and {@code getDouble} instead. - * - * @return The value of this parameter as a String. - */ - public String getValue() { - return this.value; - } - - /** - * Tries to coerce the value of this parameter to a Boolean. A range of values (true, t, on, 1, y, yes) are - * interpreted as {@code true}, and a similar range (false, f, off, 0, no, n) as {@code false}. Everything - * else, including null, results in a {@code MalformedDataException}. - * - * @return The value of this parameter coerced to a Boolean. - * @throws MalformedDataException If the value cannot be coerced to a boolean. - */ - public boolean getBoolean() throws MalformedDataException { - return XmlUtil.normalizeBoolean(getValue()); - } - - /** - * Coerces the value of this field to an int. - * - * @return An int parsed from this parameter's value. - */ - public int getInt() { - return Integer.parseInt(getValue()); - } - - /** - * Coerces the value of this field to a long. - * - * @return A long parsed from this parameter's value. - */ - public long getLong() { - return Long.parseLong(getValue()); - } - - /** - * Coerces the value of this field to a float. - * - * @return A float parsed from this parameter's value. - */ - public float getFloat() { - return Float.parseFloat(getValue()); - } - - /** - * Coerces the value of this field to a double. - * - * @return A double parsed from this parameter's value. 
- */ - public double getDouble() { - return Double.parseDouble(getValue()); - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof SingleValueParameter that)) { - return false; - } else { - return this.getValue().equals(that.getValue()) && this.getName().equals(that.getName()); - } - } - - @Override - public int hashCode() { - return (this.name == null ? 0 : this.name.hashCode()) ^ - (this.value == null ? 0 : this.value.hashCode()); - } -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk.modularinput; + +/** + * The {@code SingleValueParameter} class represents a parameter as part of a modular input instance that + * contains only a single value. This corresponds to XML fragments of the form: + * + * <pre> + * {@code + * <param name="param1">value11</param> + * } + * </pre> + */ +public class SingleValueParameter extends Parameter { + private final String name; + private final String value; + + // Package private by design. + SingleValueParameter(String name, String value) { + this.name = name; + this.value = value; + } + + /** + * @return the name of this parameter. + */ + public String getName() { + return this.name; + } + + /** + * Returns the parameter as found (as a String), without trying to coerce it to another type. 
+ * + * If your field is Boolean or numeric, use {@code getBoolean} or one of {@code getInt}, {@code getLong}, + * {@code getFloat}, and {@code getDouble} instead. + * + * @return The value of this parameter as a String. + */ + public String getValue() { + return this.value; + } + + /** + * Tries to coerce the value of this parameter to a Boolean. A range of values (true, t, on, 1, y, yes) are + * interpreted as {@code true}, and a similar range (false, f, off, 0, no, n) as {@code false}. Everything + * else, including null, results in a {@code MalformedDataException}. + * + * @return The value of this parameter coerced to a Boolean. + * @throws MalformedDataException If the value cannot be coerced to a boolean. + */ + public boolean getBoolean() throws MalformedDataException { + return XmlUtil.normalizeBoolean(getValue()); + } + + /** + * Coerces the value of this field to an int. + * + * @return An int parsed from this parameter's value. + */ + public int getInt() { + return Integer.parseInt(getValue()); + } + + /** + * Coerces the value of this field to a long. + * + * @return A long parsed from this parameter's value. + */ + public long getLong() { + return Long.parseLong(getValue()); + } + + /** + * Coerces the value of this field to a float. + * + * @return A float parsed from this parameter's value. + */ + public float getFloat() { + return Float.parseFloat(getValue()); + } + + /** + * Coerces the value of this field to a double. + * + * @return A double parsed from this parameter's value. + */ + public double getDouble() { + return Double.parseDouble(getValue()); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof SingleValueParameter that)) { + return false; + } else { + return this.getValue().equals(that.getValue()) && this.getName().equals(that.getName()); + } + } + + @Override + public int hashCode() { + return (this.name == null ? 0 : this.name.hashCode()) ^ + (this.value == null ? 
0 : this.value.hashCode()); + } +} diff --git a/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java b/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java index ea282db8..72b1be29 100644 --- a/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java +++ b/splunk/src/main/java/com/splunk/modularinput/ValidationDefinition.java @@ -1,238 +1,238 @@ -/* - * Copyright 2013 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk.modularinput; - -import org.w3c.dom.Document; -import org.w3c.dom.Node; -import org.xml.sax.SAXException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.io.IOException; -import java.io.InputStream; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * The {@code ValidationDefinition} class represents the XML sent by Splunk for external validation of a new modular input. 
- */ -public class ValidationDefinition { - private Map<String, String> metadata; - - private Map<String, Parameter> parameters; - - private static final String serverHostField = "server_host"; - private static final String serverUriField = "server_uri"; - private static final String checkpointDirField = "checkpoint_dir"; - private static final String sessionKeyField = "session_key"; - private static final String nameField = "name"; - - // Package private on purpose. - ValidationDefinition() { - super(); - metadata = new HashMap<>(); - } - - /** - * Sets the name of the server on which this modular input is being run. - * - * @param serverHost The name of the server on which this modular input is being run. - */ - void setServerHost(String serverHost) { - this.metadata.put(serverHostField, serverHost); - } - - /** - * Gets the name of the server on which this modular input is being run. - * - * @return The name of the server on which this modular input is being run. - */ - public String getServerHost() { - return this.metadata.get(serverHostField); - } - - /** - * Sets the URI to reach the server on which this modular input is being run. - * - * @param serverUri The URI to reach the server on which this modular input is being run. - */ - void setServerUri(String serverUri) { - this.metadata.put(serverUriField, serverUri); - } - - /** - * Gets the URI to the server on which this modular input is being run. - * - * @return The URI to the server on which this modular input is being run. - */ - public String getServerUri() { - return this.metadata.get(serverUriField); - } - - /** - * Sets the path to write checkpoint files in. - * - * @param checkpointDir The path to write checkpoint files in. - */ - void setCheckpointDir(String checkpointDir) { - this.metadata.put(checkpointDirField, checkpointDir); - } - - /** - * Gets the path to write checkpoint files for restarting inputs in. - * - * @return The path to write checkpoint files for restarting inputs in. 
- */ - public String getCheckpointDir() { - return this.metadata.get(checkpointDirField); - } - - /** - * Sets a session key that can be used to access splunkd's REST API. - * - * @param sessionKey A session key that can be used to access splunkd's REST API. - */ - void setSessionKey(String sessionKey) { - this.metadata.put(sessionKeyField, sessionKey); - } - - /** - * Gets a session key providing access to splunkd's REST API on this host. - * - * @return A session key providing access to splunkd's REST API on this host. - */ - public String getSessionKey() { - return this.metadata.get(sessionKeyField); - } - - /** - * Sets the name of the proposed modular input instance. - * - * @param name The name of the proposed modular input instance. - */ - void setName(String name) { - this.metadata.put(nameField, name); - } - - /** - * Gets the name of the proposed modular input instance. - * - * @return The name of the proposed modular input instance. - */ - public String getName() { - return this.metadata.get(nameField); - } - - /** - * Sets a list of {@code Parameter} objects giving the proposed configuration. - * - * @param parameters A list of {@code Parameter} objects giving the proposed configuration. - */ - public void setParameters(Collection<Parameter> parameters) { - Map<String, Parameter> paramMap = new HashMap<>(); - for (Parameter p : parameters) { - paramMap.put(p.getName(), p); - } - this.parameters = paramMap; - } - - /** - * @return The parameters on the proposed input. - */ - public Map<String, Parameter> getParameters() { - return this.parameters; - } - - /** - * Create a ValidationDefinition from a provided stream containing XML. 
The XML typically will look like - * - * <pre> - * {@code - * <items> - * <server_host>myHost</server_host> - * <server_uri>https://127.0.0.1:8089</server_uri> - * <session_key>123102983109283019283</session_key> - * <checkpoint_dir>/opt/splunk/var/lib/splunk/modinputs</checkpoint_dir> - * <item name="myScheme"> - * <param name="param1">value1</param> - * <param_list name="param2"> - * <value>value2</value> - * <value>value3</value> - * <value>value4</value> - * </param_list> - * </item> - * </items> - * } - * </pre> - * - * @param stream containing XML to parse. - * @return a ValidationDefinition. - * @throws ParserConfigurationException if there are errors in setting up the parser (which indicates system - * configuration issues). - * @throws IOException if there is an error in reading from the stream. - * @throws SAXException when the XML is invalid. - * @throws MalformedDataException when the XML does not meet the required schema. - */ - public static ValidationDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, - IOException, SAXException, MalformedDataException { - DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - documentBuilderFactory.setIgnoringElementContentWhitespace(true); - documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); - documentBuilderFactory.setExpandEntityReferences(false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); - documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); - DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); - Document doc = documentBuilder.parse(stream); - - ValidationDefinition definition = new ValidationDefinition(); - for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { - if (node.getNodeType() == Node.TEXT_NODE || 
node.getNodeType() == Node.COMMENT_NODE) { - continue; - } else if (node.getNodeName().equals("item")) { - String name = node.getAttributes().getNamedItem("name").getNodeValue(); - definition.setName(name); - - List<Parameter> parameter = Parameter.nodeToParameterList(node); - definition.setParameters(parameter); - } else { - definition.metadata.put( - node.getNodeName(), - XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) - ); - } - } - - return definition; - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof ValidationDefinition that)) { - return false; - } - return this.metadata.equals(that.metadata) && this.parameters.equals(that.parameters); - } - - @Override - public int hashCode() { - return this.metadata.hashCode() ^ (this.parameters == null ? 0 : this.parameters.hashCode()); - } - -} +/* + * Copyright 2013 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.splunk.modularinput; + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * The {@code ValidationDefinition} class represents the XML sent by Splunk for external validation of a new modular input. + */ +public class ValidationDefinition { + private Map<String, String> metadata; + + private Map<String, Parameter> parameters; + + private static final String serverHostField = "server_host"; + private static final String serverUriField = "server_uri"; + private static final String checkpointDirField = "checkpoint_dir"; + private static final String sessionKeyField = "session_key"; + private static final String nameField = "name"; + + // Package private on purpose. + ValidationDefinition() { + super(); + metadata = new HashMap<>(); + } + + /** + * Sets the name of the server on which this modular input is being run. + * + * @param serverHost The name of the server on which this modular input is being run. + */ + void setServerHost(String serverHost) { + this.metadata.put(serverHostField, serverHost); + } + + /** + * Gets the name of the server on which this modular input is being run. + * + * @return The name of the server on which this modular input is being run. + */ + public String getServerHost() { + return this.metadata.get(serverHostField); + } + + /** + * Sets the URI to reach the server on which this modular input is being run. + * + * @param serverUri The URI to reach the server on which this modular input is being run. 
+ */ + void setServerUri(String serverUri) { + this.metadata.put(serverUriField, serverUri); + } + + /** + * Gets the URI to the server on which this modular input is being run. + * + * @return The URI to the server on which this modular input is being run. + */ + public String getServerUri() { + return this.metadata.get(serverUriField); + } + + /** + * Sets the path to write checkpoint files in. + * + * @param checkpointDir The path to write checkpoint files in. + */ + void setCheckpointDir(String checkpointDir) { + this.metadata.put(checkpointDirField, checkpointDir); + } + + /** + * Gets the path to write checkpoint files for restarting inputs in. + * + * @return The path to write checkpoint files for restarting inputs in. + */ + public String getCheckpointDir() { + return this.metadata.get(checkpointDirField); + } + + /** + * Sets a session key that can be used to access splunkd's REST API. + * + * @param sessionKey A session key that can be used to access splunkd's REST API. + */ + void setSessionKey(String sessionKey) { + this.metadata.put(sessionKeyField, sessionKey); + } + + /** + * Gets a session key providing access to splunkd's REST API on this host. + * + * @return A session key providing access to splunkd's REST API on this host. + */ + public String getSessionKey() { + return this.metadata.get(sessionKeyField); + } + + /** + * Sets the name of the proposed modular input instance. + * + * @param name The name of the proposed modular input instance. + */ + void setName(String name) { + this.metadata.put(nameField, name); + } + + /** + * Gets the name of the proposed modular input instance. + * + * @return The name of the proposed modular input instance. + */ + public String getName() { + return this.metadata.get(nameField); + } + + /** + * Sets a list of {@code Parameter} objects giving the proposed configuration. + * + * @param parameters A list of {@code Parameter} objects giving the proposed configuration. 
+ */ + public void setParameters(Collection<Parameter> parameters) { + Map<String, Parameter> paramMap = new HashMap<>(); + for (Parameter p : parameters) { + paramMap.put(p.getName(), p); + } + this.parameters = paramMap; + } + + /** + * @return The parameters on the proposed input. + */ + public Map<String, Parameter> getParameters() { + return this.parameters; + } + + /** + * Create a ValidationDefinition from a provided stream containing XML. The XML typically will look like + * + * <pre> + * {@code + * <items> + * <server_host>myHost</server_host> + * <server_uri>https://127.0.0.1:8089</server_uri> + * <session_key>123102983109283019283</session_key> + * <checkpoint_dir>/opt/splunk/var/lib/splunk/modinputs</checkpoint_dir> + * <item name="myScheme"> + * <param name="param1">value1</param> + * <param_list name="param2"> + * <value>value2</value> + * <value>value3</value> + * <value>value4</value> + * </param_list> + * </item> + * </items> + * } + * </pre> + * + * @param stream containing XML to parse. + * @return a ValidationDefinition. + * @throws ParserConfigurationException if there are errors in setting up the parser (which indicates system + * configuration issues). + * @throws IOException if there is an error in reading from the stream. + * @throws SAXException when the XML is invalid. + * @throws MalformedDataException when the XML does not meet the required schema. 
+ */ + public static ValidationDefinition parseDefinition(InputStream stream) throws ParserConfigurationException, + IOException, SAXException, MalformedDataException { + DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + documentBuilderFactory.setIgnoringElementContentWhitespace(true); + documentBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + documentBuilderFactory.setExpandEntityReferences(false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + documentBuilderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false); + DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); + Document doc = documentBuilder.parse(stream); + + ValidationDefinition definition = new ValidationDefinition(); + for (Node node = doc.getDocumentElement().getFirstChild(); node != null; node = node.getNextSibling()) { + if (node.getNodeType() == Node.TEXT_NODE || node.getNodeType() == Node.COMMENT_NODE) { + continue; + } else if (node.getNodeName().equals("item")) { + String name = node.getAttributes().getNamedItem("name").getNodeValue(); + definition.setName(name); + + List<Parameter> parameter = Parameter.nodeToParameterList(node); + definition.setParameters(parameter); + } else { + definition.metadata.put( + node.getNodeName(), + XmlUtil.textInNode(node, "Expected a text value in element " + node.getNodeName()) + ); + } + } + + return definition; + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof ValidationDefinition that)) { + return false; + } + return this.metadata.equals(that.metadata) && this.parameters.equals(that.parameters); + } + + @Override + public int hashCode() { + return this.metadata.hashCode() ^ (this.parameters == null ? 
0 : this.parameters.hashCode()); + } + +} diff --git a/splunk/src/test/java/com/splunk/ApplicationTest.java b/splunk/src/test/java/com/splunk/ApplicationTest.java index df8afe2a..80c5b1d9 100644 --- a/splunk/src/test/java/com/splunk/ApplicationTest.java +++ b/splunk/src/test/java/com/splunk/ApplicationTest.java @@ -1,235 +1,235 @@ -/* - * Copyright 2012 Splunk, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"): you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.splunk; - -import org.junit.*; -import org.w3c.dom.Document; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; - -import javax.xml.parsers.DocumentBuilderFactory; -import java.io.*; - -public class ApplicationTest extends SDKTestCase { - private String applicationName; - private Application application; - - @Before - @Override - public void setUp() throws Exception { - super.setUp(); - - removeTestApplications(); - - applicationName = createTemporaryName(); - application = service.getApplications().create(applicationName); - } - - - @After - @Override - public void tearDown() throws Exception { - removeTestApplications(); - - // Clear the restart message that deleting apps causes in splunkd. - // It's fine to keep going despite it. 
- clearRestartMessage(); - - super.tearDown(); - } - - private void removeTestApplications() { - final EntityCollection<Application> apps = service.getApplications(); - for (Application app : apps.values()) { - final String appName = app.getName(); - if (appName.startsWith("delete-me")) { - app.remove(); - assertEventuallyTrue(new EventuallyTrueBehavior() { - @Override - public boolean predicate() { - apps.refresh(); - return !apps.containsKey(appName); - } - }); - } - } - } - - @Test - public void testForEmptySetup() { - final String setupXml = application.setup().getSetupXml(); - // Newly created applications now has a setup stub. - Assert.assertTrue(setupXml.contains("stub")); - } - - @Test - public void testForSetupPresent() throws Exception { - if (!hasTestData()) { - System.out.println("WARNING: sdk-app-collection not installed in Splunk; skipping test."); - return; - } - installApplicationFromTestData("has_setup_xml"); - Assert.assertTrue(service.getApplications().containsKey("has_setup_xml")); - Application applicationWithSetupXml = service.getApplications().get("has_setup_xml"); - - ApplicationSetup applicationSetup = applicationWithSetupXml.setup(); - Assert.assertEquals("has_setup_xml", applicationSetup.getName()); - Assert.assertFalse(applicationSetup.getRefresh()); - - String setupXml = applicationSetup.getSetupXml(); - Document parsedSetupXml = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse( - new ByteArrayInputStream(setupXml.getBytes("UTF-8"))); - parsedSetupXml.getDocumentElement().normalize(); - - Assert.assertEquals(parsedSetupXml.getDocumentElement().getNodeName(), "SetupInfo"); - - NodeList blocks = parsedSetupXml.getDocumentElement().getElementsByTagName("block"); - Assert.assertEquals(1, blocks.getLength()); - Node block = blocks.item(0); - Assert.assertEquals("block", block.getNodeName()); - } - - @Test - public void testArchive() { - ApplicationArchive archive = application.archive(); - Assert.assertEquals(applicationName, 
archive.getAppName()); - { - String filePath = archive.getFilePath(); - Assert.assertTrue(filePath.contains("/") || filePath.contains("\\")); - Assert.assertTrue(filePath.endsWith(applicationName + ".spl")); - } - Assert.assertFalse(archive.getRefresh()); - Assert.assertTrue(archive.getUrl() != null); - } - - @Test - public void testFields() { - // Initially, should be empty. - Assert.assertEquals(null, application.getAuthor()); - Assert.assertTrue(application.getCheckForUpdates()); - Assert.assertFalse(application.isConfigured()); - Assert.assertTrue(application.isVisible()); - Assert.assertFalse(application.stateChangeRequiresRestart()); - Assert.assertFalse(application.getRefresh()); - - String authorString = "Boris the mad baboon"; - application.setAuthor(authorString); - application.setCheckForUpdates(false); - String descriptionString = "Meep the nudebranch!"; - application.setDescription(descriptionString); - String labelString = "Hugga wugga"; - application.setLabel(labelString); - String versionString = "VII"; - application.setVersion(versionString); - application.setConfigured(true); - application.setVisible(false); - - application.update(); - application.refresh(); - - Assert.assertEquals(authorString, application.getAuthor()); - Assert.assertFalse(application.getCheckForUpdates()); - Assert.assertEquals(descriptionString, application.getDescription()); - Assert.assertEquals(labelString, application.getLabel()); - Assert.assertEquals(versionString, application.getVersion()); - Assert.assertTrue(application.isConfigured()); - Assert.assertFalse(application.isVisible()); - } - - @Test - public void testUpdate() { - if (service.getApplications().get("wc") == null) { - System.out.println("WARNING: Must have app wc installed on splunkd to run ApplicationTest.testUpdate"); - return; - } - - // Set the version of wc to something small, - // then wait for splunkd to pull its update information from splunkbase. 
- - Application gettingStarted = service.getApplications().get("wc"); - String originalVersion = gettingStarted.getVersion(); - try { - // Decrease the app's version - gettingStarted.setVersion("0.1"); - gettingStarted.update(); - - // The easiest way to force Splunk to check for new versions of apps - // is to restart it. Otherwise who knows how long it will be... - uncheckedSplunkRestart(); - gettingStarted = service.getApplications().get("wc"); - - // Wait until Splunk sees that an update for the app is available - // NOTE: This typically takes about 15s - final Application gettingStartedReference = gettingStarted; - assertEventuallyTrue(new EventuallyTrueBehavior() { - { tries = 100; } - @Override - public boolean predicate() { - return gettingStartedReference.getUpdate().getChecksum() != null; - } - }); - - // Verify expected properties of the update - ApplicationUpdate update = gettingStarted.getUpdate(); - Assert.assertEquals("315d8e92a0227aa75bbca1b8f33b4970", update.getChecksum()); - Assert.assertEquals("md5", update.getChecksumType()); - Assert.assertEquals("https://apps.splunk.com/app/1541/", update.getHomepage()); - Assert.assertEquals(39879, update.getSize()); - Assert.assertEquals("wc - word count", update.getUpdateName()); - Assert.assertEquals( - "https://apps.splunk.com/app/1541/package/1.0/none/", - update.getAppUrl() - ); - Assert.assertEquals("1.0", update.getVersion()); - Assert.assertFalse(update.isImplicitIdRequired()); - } finally { - // Restore the app's original version - gettingStarted.setVersion(originalVersion); - gettingStarted.update(); - } - } - - @Test - public void testEmptyUpdate() { - ApplicationUpdate update = application.getUpdate(); - Assert.assertNull(update.getChecksum()); - Assert.assertNull(update.getChecksumType()); - Assert.assertNull(update.getHomepage()); - Assert.assertEquals(-1, update.getSize()); - Assert.assertNull(update.getUpdateName()); - Assert.assertNull(update.getAppUrl()); - 
Assert.assertNull(update.getVersion()); - Assert.assertFalse(update.isImplicitIdRequired()); - } - - @Test - public void testListApplications() { - boolean found = false; - for (Application app : service.getApplications().values()) { - if (app.getName().equals(applicationName)) { - found = true; - } - } - Assert.assertTrue(found); - } - - @Test - public void testContains() { - Assert.assertTrue(service.getApplications().containsKey(applicationName)); - } - -} +/* + * Copyright 2012 Splunk, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"): you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.splunk; + +import org.junit.*; +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import javax.xml.parsers.DocumentBuilderFactory; +import java.io.*; + +public class ApplicationTest extends SDKTestCase { + private String applicationName; + private Application application; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + removeTestApplications(); + + applicationName = createTemporaryName(); + application = service.getApplications().create(applicationName); + } + + + @After + @Override + public void tearDown() throws Exception { + removeTestApplications(); + + // Clear the restart message that deleting apps causes in splunkd. + // It's fine to keep going despite it. 
+ clearRestartMessage(); + + super.tearDown(); + } + + private void removeTestApplications() { + final EntityCollection<Application> apps = service.getApplications(); + for (Application app : apps.values()) { + final String appName = app.getName(); + if (appName.startsWith("delete-me")) { + app.remove(); + assertEventuallyTrue(new EventuallyTrueBehavior() { + @Override + public boolean predicate() { + apps.refresh(); + return !apps.containsKey(appName); + } + }); + } + } + } + + @Test + public void testForEmptySetup() { + final String setupXml = application.setup().getSetupXml(); + // Newly created applications now has a setup stub. + Assert.assertTrue(setupXml.contains("stub")); + } + + @Test + public void testForSetupPresent() throws Exception { + if (!hasTestData()) { + System.out.println("WARNING: sdk-app-collection not installed in Splunk; skipping test."); + return; + } + installApplicationFromTestData("has_setup_xml"); + Assert.assertTrue(service.getApplications().containsKey("has_setup_xml")); + Application applicationWithSetupXml = service.getApplications().get("has_setup_xml"); + + ApplicationSetup applicationSetup = applicationWithSetupXml.setup(); + Assert.assertEquals("has_setup_xml", applicationSetup.getName()); + Assert.assertFalse(applicationSetup.getRefresh()); + + String setupXml = applicationSetup.getSetupXml(); + Document parsedSetupXml = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse( + new ByteArrayInputStream(setupXml.getBytes("UTF-8"))); + parsedSetupXml.getDocumentElement().normalize(); + + Assert.assertEquals(parsedSetupXml.getDocumentElement().getNodeName(), "SetupInfo"); + + NodeList blocks = parsedSetupXml.getDocumentElement().getElementsByTagName("block"); + Assert.assertEquals(1, blocks.getLength()); + Node block = blocks.item(0); + Assert.assertEquals("block", block.getNodeName()); + } + + @Test + public void testArchive() { + ApplicationArchive archive = application.archive(); + Assert.assertEquals(applicationName, 
archive.getAppName()); + { + String filePath = archive.getFilePath(); + Assert.assertTrue(filePath.contains("/") || filePath.contains("\\")); + Assert.assertTrue(filePath.endsWith(applicationName + ".spl")); + } + Assert.assertFalse(archive.getRefresh()); + Assert.assertTrue(archive.getUrl() != null); + } + + @Test + public void testFields() { + // Initially, should be empty. + Assert.assertEquals(null, application.getAuthor()); + Assert.assertTrue(application.getCheckForUpdates()); + Assert.assertFalse(application.isConfigured()); + Assert.assertTrue(application.isVisible()); + Assert.assertFalse(application.stateChangeRequiresRestart()); + Assert.assertFalse(application.getRefresh()); + + String authorString = "Boris the mad baboon"; + application.setAuthor(authorString); + application.setCheckForUpdates(false); + String descriptionString = "Meep the nudebranch!"; + application.setDescription(descriptionString); + String labelString = "Hugga wugga"; + application.setLabel(labelString); + String versionString = "VII"; + application.setVersion(versionString); + application.setConfigured(true); + application.setVisible(false); + + application.update(); + application.refresh(); + + Assert.assertEquals(authorString, application.getAuthor()); + Assert.assertFalse(application.getCheckForUpdates()); + Assert.assertEquals(descriptionString, application.getDescription()); + Assert.assertEquals(labelString, application.getLabel()); + Assert.assertEquals(versionString, application.getVersion()); + Assert.assertTrue(application.isConfigured()); + Assert.assertFalse(application.isVisible()); + } + + @Test + public void testUpdate() { + if (service.getApplications().get("wc") == null) { + System.out.println("WARNING: Must have app wc installed on splunkd to run ApplicationTest.testUpdate"); + return; + } + + // Set the version of wc to something small, + // then wait for splunkd to pull its update information from splunkbase. 
+ + Application gettingStarted = service.getApplications().get("wc"); + String originalVersion = gettingStarted.getVersion(); + try { + // Decrease the app's version + gettingStarted.setVersion("0.1"); + gettingStarted.update(); + + // The easiest way to force Splunk to check for new versions of apps + // is to restart it. Otherwise who knows how long it will be... + uncheckedSplunkRestart(); + gettingStarted = service.getApplications().get("wc"); + + // Wait until Splunk sees that an update for the app is available + // NOTE: This typically takes about 15s + final Application gettingStartedReference = gettingStarted; + assertEventuallyTrue(new EventuallyTrueBehavior() { + { tries = 100; } + @Override + public boolean predicate() { + return gettingStartedReference.getUpdate().getChecksum() != null; + } + }); + + // Verify expected properties of the update + ApplicationUpdate update = gettingStarted.getUpdate(); + Assert.assertEquals("315d8e92a0227aa75bbca1b8f33b4970", update.getChecksum()); + Assert.assertEquals("md5", update.getChecksumType()); + Assert.assertEquals("https://apps.splunk.com/app/1541/", update.getHomepage()); + Assert.assertEquals(39879, update.getSize()); + Assert.assertEquals("wc - word count", update.getUpdateName()); + Assert.assertEquals( + "https://apps.splunk.com/app/1541/package/1.0/none/", + update.getAppUrl() + ); + Assert.assertEquals("1.0", update.getVersion()); + Assert.assertFalse(update.isImplicitIdRequired()); + } finally { + // Restore the app's original version + gettingStarted.setVersion(originalVersion); + gettingStarted.update(); + } + } + + @Test + public void testEmptyUpdate() { + ApplicationUpdate update = application.getUpdate(); + Assert.assertNull(update.getChecksum()); + Assert.assertNull(update.getChecksumType()); + Assert.assertNull(update.getHomepage()); + Assert.assertEquals(-1, update.getSize()); + Assert.assertNull(update.getUpdateName()); + Assert.assertNull(update.getAppUrl()); + 
Assert.assertNull(update.getVersion()); + Assert.assertFalse(update.isImplicitIdRequired()); + } + + @Test + public void testListApplications() { + boolean found = false; + for (Application app : service.getApplications().values()) { + if (app.getName().equals(applicationName)) { + found = true; + } + } + Assert.assertTrue(found); + } + + @Test + public void testContains() { + Assert.assertTrue(service.getApplications().containsKey(applicationName)); + } + +} diff --git a/splunk/src/test/java/com/splunk/AtomFeedTest.java b/splunk/src/test/java/com/splunk/AtomFeedTest.java index e534e21a..e334794d 100644 --- a/splunk/src/test/java/com/splunk/AtomFeedTest.java +++ b/splunk/src/test/java/com/splunk/AtomFeedTest.java @@ -90,8 +90,8 @@ public void testAtomFeed() { @Parameterized.Parameters(name="{0}") public static Collection<Object[]> testCases() { - Collection<Object[]> cases = new ArrayList<Object[]>(); - for (String key : (Set<String>)expectedData.keySet()) { + Collection<Object[]> cases = new ArrayList<>(); + for (String key : expectedData.keySet()) { cases.add(new Object[] { key }); } return cases; diff --git a/splunk/src/test/java/com/splunk/CookieTest.java b/splunk/src/test/java/com/splunk/CookieTest.java index d11cda76..d23f3baf 100644 --- a/splunk/src/test/java/com/splunk/CookieTest.java +++ b/splunk/src/test/java/com/splunk/CookieTest.java @@ -224,7 +224,7 @@ public void testHttpServiceWithNoCookie() { } private Map<String, Object> getStandardArgs() { - Map<String, Object> args = new HashMap<String, Object>(); + Map<String, Object> args = new HashMap<>(); args.put("host", (String)command.opts.get("host")); args.put("port", (Integer) command.opts.get("port")); diff --git a/splunk/src/test/java/com/splunk/DataModelTest.java b/splunk/src/test/java/com/splunk/DataModelTest.java index 2a8585e8..4a16ee2b 100644 --- a/splunk/src/test/java/com/splunk/DataModelTest.java +++ b/splunk/src/test/java/com/splunk/DataModelTest.java @@ -443,7 +443,7 @@ public void 
testCalculations() { Assert.assertEquals("", lc.getComment()); Assert.assertEquals(true, lc.isEditable()); List<LookupDataModelCalculation.LookupFieldMapping> expectedFieldMappings = - new ArrayList<LookupDataModelCalculation.LookupFieldMapping>(); + new ArrayList<>(); expectedFieldMappings.add(new LookupDataModelCalculation.LookupFieldMapping() {{ inputField = "host"; lookupField = "a_lookup_field"; diff --git a/splunk/src/test/java/com/splunk/DeploymentServerTest.java b/splunk/src/test/java/com/splunk/DeploymentServerTest.java index ba75489f..ee8850a6 100644 --- a/splunk/src/test/java/com/splunk/DeploymentServerTest.java +++ b/splunk/src/test/java/com/splunk/DeploymentServerTest.java @@ -29,7 +29,7 @@ public void testDeploymentServer() throws Exception { EntityCollection<DeploymentServer> deploymentServers = service.getDeploymentServers(); - if (deploymentServers.values().size() == 0) { + if (deploymentServers.values().isEmpty()) { System.out.println("WARNING: No DeploymentServer entities to test"); return; } diff --git a/splunk/src/test/java/com/splunk/DeploymentTenantTest.java b/splunk/src/test/java/com/splunk/DeploymentTenantTest.java index 153b1064..c900bf8a 100644 --- a/splunk/src/test/java/com/splunk/DeploymentTenantTest.java +++ b/splunk/src/test/java/com/splunk/DeploymentTenantTest.java @@ -29,7 +29,7 @@ public void testDeploymentTenant() throws Exception { EntityCollection<DeploymentTenant> deploymentTenants = service.getDeploymentTenants(); - if (deploymentTenants.values().size() == 0) { + if (deploymentTenants.values().isEmpty()) { System.out.println("WARNING: No DeploymentTenant entities to test"); return; } diff --git a/splunk/src/test/java/com/splunk/EventTypesTest.java b/splunk/src/test/java/com/splunk/EventTypesTest.java index 5e98e860..81e16bc7 100644 --- a/splunk/src/test/java/com/splunk/EventTypesTest.java +++ b/splunk/src/test/java/com/splunk/EventTypesTest.java @@ -54,7 +54,7 @@ public void tearDown() throws Exception { @Test public void 
testList() { EntityCollection<EventType> eventTypes = service.getEventTypes(); - Assert.assertFalse("No event types in system.", eventTypes.size() == 0); + Assert.assertFalse("No event types in system.", eventTypes.isEmpty()); for (EventType eventType : eventTypes.values()) { eventType.getDescription(); diff --git a/splunk/src/test/java/com/splunk/ExportResultsReaderTest.java b/splunk/src/test/java/com/splunk/ExportResultsReaderTest.java index 0c460372..b3367aae 100644 --- a/splunk/src/test/java/com/splunk/ExportResultsReaderTest.java +++ b/splunk/src/test/java/com/splunk/ExportResultsReaderTest.java @@ -52,8 +52,8 @@ public ExportResultsReaderTest(String version) { @Parameterized.Parameters(name="from version {0}") public static Collection<Object[]> testCases() { - Collection<Object[]> cases = new ArrayList<Object[]>(); - for (String version : (Set<String>)expectedData.keySet()) { + Collection<Object[]> cases = new ArrayList<>(); + for (String version : expectedData.keySet()) { cases.add(new Object[] {version}); } return cases; diff --git a/splunk/src/test/java/com/splunk/HttpServiceTest.java b/splunk/src/test/java/com/splunk/HttpServiceTest.java index 283f1b99..199f7863 100644 --- a/splunk/src/test/java/com/splunk/HttpServiceTest.java +++ b/splunk/src/test/java/com/splunk/HttpServiceTest.java @@ -117,9 +117,10 @@ public void testSSLSocketFactorySetNull(){ public void testSSLSocketFactory() { try { SSLSocketFactory factory = Service.getSSLSocketFactory(); - SSLSocket socket = (SSLSocket) factory.createSocket((String)command.opts.get("host"), 8089); - String[] protocols = socket.getEnabledProtocols(); - Assert.assertTrue(protocols.length > 0); + try (SSLSocket socket = (SSLSocket) factory.createSocket((String)command.opts.get("host"), 8089)) { + String[] protocols = socket.getEnabledProtocols(); + Assert.assertTrue(protocols.length > 0); + } } catch (Exception e) { Assert.assertNull(e); diff --git a/splunk/src/test/java/com/splunk/IndexTest.java 
b/splunk/src/test/java/com/splunk/IndexTest.java index 56d6670a..cd6f06a8 100644 --- a/splunk/src/test/java/com/splunk/IndexTest.java +++ b/splunk/src/test/java/com/splunk/IndexTest.java @@ -478,7 +478,7 @@ public boolean predicate() { @Test public void testSubmitOneWithNamespacedService() { - Map<String, Object> opts = new HashMap<String, Object>(command.opts); + Map<String, Object> opts = new HashMap<>(command.opts); opts.put("app", "search"); final Service service = Service.connect(opts); Assert.assertNotNull(service); @@ -859,8 +859,7 @@ private int getResultCountOfIndex(Service s) { } private int getResultCountOfIndex(Service s, String indexName) { - InputStream results = s.oneshotSearch("search index=" + indexName); - try { + try (InputStream results = s.oneshotSearch("search index=" + indexName)) { ResultsReaderXml resultsReader = new ResultsReaderXml(results); int numEvents = 0; diff --git a/splunk/src/test/java/com/splunk/LicenseMessageTest.java b/splunk/src/test/java/com/splunk/LicenseMessageTest.java index 3614e0ac..629973a2 100644 --- a/splunk/src/test/java/com/splunk/LicenseMessageTest.java +++ b/splunk/src/test/java/com/splunk/LicenseMessageTest.java @@ -28,7 +28,7 @@ public void testLicenseMessage() throws Exception { EntityCollection<LicenseMessage> licenseMessages = service.getLicenseMessages(); - if (licenseMessages.values().size() == 0) { + if (licenseMessages.values().isEmpty()) { System.out.println("WARNING: No license messages found to test."); return; } diff --git a/splunk/src/test/java/com/splunk/ModularInputKindsTest.java b/splunk/src/test/java/com/splunk/ModularInputKindsTest.java index 8c7d7fff..434f374c 100644 --- a/splunk/src/test/java/com/splunk/ModularInputKindsTest.java +++ b/splunk/src/test/java/com/splunk/ModularInputKindsTest.java @@ -115,7 +115,7 @@ public void testArgDescription() { ModularInputKindArgument arg; - Map<String,String> expectedValues = new HashMap<String,String>(); + Map<String,String> expectedValues = new 
HashMap<>(); expectedValues.put("key_id", "The key of the system"); expectedValues.put("no_description", null); expectedValues.put("empty_description", null); @@ -143,7 +143,7 @@ public void testArgDataType() { ModularInputKindArgument arg; Map<String,ModularInputKindArgument.Type> expectedValues = - new HashMap<String,ModularInputKindArgument.Type>(); + new HashMap<>(); expectedValues.put("number_field", ModularInputKindArgument.Type.NUMBER); expectedValues.put("boolean_field", ModularInputKindArgument.Type.BOOLEAN); expectedValues.put("string_field", ModularInputKindArgument.Type.STRING); @@ -168,7 +168,7 @@ public void testRequiredOnCreate() { ModularInputKindArgument arg; - Map<String,Boolean> expectedValues = new HashMap<String,Boolean>(); + Map<String,Boolean> expectedValues = new HashMap<>(); expectedValues.put("required_on_create", true); expectedValues.put("not_required_on_create", false); @@ -192,7 +192,7 @@ public void testRequiredOnEdit() { ModularInputKindArgument arg; - Map<String,Boolean> expectedValues = new HashMap<String,Boolean>(); + Map<String,Boolean> expectedValues = new HashMap<>(); expectedValues.put("arg_required_on_edit", true); expectedValues.put("not_required_on_edit", false); @@ -215,7 +215,7 @@ public void testGetArguments() { ModularInputKind test1 = inputKinds.get("test1"); Map<String, ModularInputKindArgument> args = test1.getArguments(); - Set<String> expectedKeys = new HashSet<String>(); + Set<String> expectedKeys = new HashSet<>(); expectedKeys.add("name"); expectedKeys.add("resname"); expectedKeys.add("key_id"); diff --git a/splunk/src/test/java/com/splunk/OutputGroupTest.java b/splunk/src/test/java/com/splunk/OutputGroupTest.java index e850dd13..3ea25e11 100644 --- a/splunk/src/test/java/com/splunk/OutputGroupTest.java +++ b/splunk/src/test/java/com/splunk/OutputGroupTest.java @@ -24,7 +24,7 @@ public class OutputGroupTest extends SDKTestCase { public void testOutputGroup() throws Exception { EntityCollection<OutputGroup> 
outputGroups = service.getOutputGroups(); - if (outputGroups.values().size() == 0) { + if (outputGroups.values().isEmpty()) { System.out.println("WARNING: No OutputGroups to test"); return; } diff --git a/splunk/src/test/java/com/splunk/OutputServerTest.java b/splunk/src/test/java/com/splunk/OutputServerTest.java index 24fb6aa2..03fe0ef3 100644 --- a/splunk/src/test/java/com/splunk/OutputServerTest.java +++ b/splunk/src/test/java/com/splunk/OutputServerTest.java @@ -24,7 +24,7 @@ public void testOutputServer() throws Exception { EntityCollection<OutputServer> outputServers = service.getOutputServers(); - if (outputServers.values().size() == 0) { + if (outputServers.values().isEmpty()) { System.out.println("WARNING: No OutputServers to test"); return; } diff --git a/splunk/src/test/java/com/splunk/OutputSyslogTest.java b/splunk/src/test/java/com/splunk/OutputSyslogTest.java index 1ca28adb..e0b5df28 100644 --- a/splunk/src/test/java/com/splunk/OutputSyslogTest.java +++ b/splunk/src/test/java/com/splunk/OutputSyslogTest.java @@ -24,7 +24,7 @@ public class OutputSyslogTest extends SDKTestCase { public void testOutputSyslog() throws Exception { EntityCollection<OutputSyslog> dos = service.getOutputSyslogs(); - if (dos.values().size() == 0) { + if (dos.values().isEmpty()) { System.out.println("WARNING: No OutputSyslogs to test"); return; } diff --git a/splunk/src/test/java/com/splunk/PasswordTest.java b/splunk/src/test/java/com/splunk/PasswordTest.java index 3d32d393..fb03f264 100644 --- a/splunk/src/test/java/com/splunk/PasswordTest.java +++ b/splunk/src/test/java/com/splunk/PasswordTest.java @@ -133,7 +133,7 @@ public void testPasswordsCompatibleGetByName() { } @Test public void testPasswordsWithWildCards(){ - HashMap<String, Object> args = new HashMap<String, Object>(); + HashMap<String, Object> args = new HashMap<>(); args = Command.defaultValues; args.put("owner", "-"); args.put("app", "-"); diff --git a/splunk/src/test/java/com/splunk/ReceiverTest.java 
b/splunk/src/test/java/com/splunk/ReceiverTest.java index e171b9d1..6852f8c3 100644 --- a/splunk/src/test/java/com/splunk/ReceiverTest.java +++ b/splunk/src/test/java/com/splunk/ReceiverTest.java @@ -70,30 +70,22 @@ public void testReceiver(Service passedService) { final int versionCompare = passedService.versionCompare("6.0.0"); final String osName = passedService.getInfo().getOsName(); - try { - Socket socket1 = receiver.attach(); - OutputStream stream = socket1.getOutputStream(); - + try ( + Socket socket1 = receiver.attach(); + OutputStream stream = socket1.getOutputStream(); + ) { String s = createTimestamp() + " Boris the mad baboon1!\r\n"; stream.write(s.getBytes("UTF-8")); - // Splunk won't deterministically index these events until the socket is closed or greater than 1MB - // has been written. - stream.close(); - socket1.close(); } catch (IOException e) { Assert.fail("Exception on attach"); } - try { - Socket socket1 = receiver.attach(Args.create("sourcetype", "mysourcetype")); - OutputStream stream = socket1.getOutputStream(); - + try ( + Socket socket1 = receiver.attach(Args.create("sourcetype", "mysourcetype")); + OutputStream stream = socket1.getOutputStream(); + ) { String s = createTimestamp() + " Boris the mad baboon2!\r\n"; stream.write(s.getBytes("UTF-8")); - // Splunk won't deterministically index these events until the socket is closed or greater than 1MB - // has been written. 
- stream.close(); - socket1.close(); } catch (IOException e) { Assert.fail("Exception on attach"); } diff --git a/splunk/src/test/java/com/splunk/ResultsReaderTest.java b/splunk/src/test/java/com/splunk/ResultsReaderTest.java index 71ebe42d..6d346984 100644 --- a/splunk/src/test/java/com/splunk/ResultsReaderTest.java +++ b/splunk/src/test/java/com/splunk/ResultsReaderTest.java @@ -41,7 +41,7 @@ public void testReadCsv() throws Exception { Assert.assertEquals("sum(kb)", fields[0]); Assert.assertEquals("series", fields[1]); - Map<String, String> expected = new HashMap<String, String>(); + Map<String, String> expected = new HashMap<>(); expected.put("series", "twitter"); expected.put("sum(kb)", "14372242.758775"); @@ -65,7 +65,7 @@ public void testReadCsvFromOneshot() throws Exception { "search index=_internal | head 1 | stats count", Args.create("output_mode", "csv")); ResultsReaderCsv reader = new ResultsReaderCsv(input); - Map<String, String> expected = new HashMap<String, String>(); + Map<String, String> expected = new HashMap<>(); expected.put("count", "1"); assertNextEventEquals(expected, reader); @@ -124,7 +124,7 @@ private MultiResultsReader getExportStreamXml() throws IOException { public void testReadJsonOnSplunk4() throws Exception { InputStream input = openResource("/results4.json"); ResultsReaderJson reader = new ResultsReaderJson(input); - Map<String, String> expected = new HashMap<String, String>(); + Map<String, String> expected = new HashMap<>(); expected.put("series", "twitter"); expected.put("sum(kb)", "14372242.758775"); @@ -148,7 +148,7 @@ public void testReadJsonOnSplunk5() throws Exception { // from Splunk 4.3. 
InputStream input = openResource("/results5.json"); ResultsReaderJson reader = new ResultsReaderJson(input); - Map<String, String> expected = new HashMap<String, String>(); + Map<String, String> expected = new HashMap<>(); expected.put("series", "twitter"); expected.put("sum(kb)", "14372242.758775"); diff --git a/splunk/src/test/java/com/splunk/ResultsReaderTestFromExpectedFile.java b/splunk/src/test/java/com/splunk/ResultsReaderTestFromExpectedFile.java index ecfa0d14..8c527eee 100644 --- a/splunk/src/test/java/com/splunk/ResultsReaderTestFromExpectedFile.java +++ b/splunk/src/test/java/com/splunk/ResultsReaderTestFromExpectedFile.java @@ -75,11 +75,11 @@ public ResultsReaderTestFromExpectedFile(String version, String testName) { @Parameterized.Parameters(name="{1} from version {0}") public static Collection<Object[]> testCases() { - Collection<Object[]> cases = new ArrayList<Object[]>(); - for (String version : (Set<String>)expectedData.keySet()) { + Collection<Object[]> cases = new ArrayList<>(); + for (String version : expectedData.keySet()) { Map<String, Object> casesForThisVersion = (Map<String, Object>)expectedData.get(version); - for (String testName : (Set<String>)casesForThisVersion.keySet()) { + for (String testName : casesForThisVersion.keySet()) { cases.add(new Object[] {version, testName}); } } @@ -141,9 +141,9 @@ static void verifyResultsReader( assertEquals(expectedKeys, foundEvent.keySet()); for (String key : expectedFields.keySet()) { assertTrue(foundEvent.containsKey(key)); - if (expectedFields.get(key) instanceof List) { + if (expectedFields.get(key) instanceof List value) { assertEquals( - expectedFields.get(key), + value, Arrays.asList(foundEvent.getArray(key))); } else { assertEquals(expectedFields.get(key), foundEvent.get(key)); diff --git a/splunk/src/test/java/com/splunk/SDKTestCase.java b/splunk/src/test/java/com/splunk/SDKTestCase.java index c2c623fa..349e5929 100644 --- a/splunk/src/test/java/com/splunk/SDKTestCase.java +++ 
b/splunk/src/test/java/com/splunk/SDKTestCase.java @@ -51,7 +51,7 @@ public abstract class SDKTestCase { protected Command command; - public static String streamToString(java.io.InputStream is) { + public static String streamToString(InputStream is) { Reader r = null; try { r = new InputStreamReader(is, "UTF-8"); @@ -114,7 +114,7 @@ public void setUp() throws Exception { splunkRestart(); System.out.println("Restart complete."); } - installedApps = new ArrayList<String>(); + installedApps = new ArrayList<>(); } @After @@ -349,8 +349,7 @@ protected int findNextUnusedPort(int startingPort) { } public boolean isPortInUse(int port) { - try { - Socket pingSocket = new Socket(); + try (Socket pingSocket = new Socket()) { // On Windows, the firewall doesn't respond at all if you connect to an unbound port, so we need to // take lack of a connection as an empty port. Timeout is 1000ms. try { @@ -358,7 +357,6 @@ public boolean isPortInUse(int port) { } catch (SocketTimeoutException ste) { return false; } - pingSocket.close(); if (VERBOSE_PORT_SCAN) { System.out.println("IN-USE(" + port + ")"); } @@ -414,20 +412,20 @@ else if (osName.equals("Darwin")) { } protected boolean firstLineIsXmlDtd(InputStream stream) { - InputStreamReader reader; - try { - reader = new InputStreamReader(stream, "UTF-8"); + try (InputStreamReader reader = new InputStreamReader(stream, "UTF-8"); + BufferedReader lineReader = new BufferedReader(reader)) { + try { + return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>".equals( + lineReader.readLine() + ); + } catch (IOException e) { + Assert.fail(e.toString()); + return false; + } } catch (UnsupportedEncodingException e) { throw new Error(e); - } - BufferedReader lineReader = new BufferedReader(reader); - try { - return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>".equals( - lineReader.readLine() - ); } catch (IOException e) { - Assert.fail(e.toString()); - return false; + throw new RuntimeException(e); } } } diff --git 
a/splunk/src/test/java/com/splunk/SavedSearchTest.java b/splunk/src/test/java/com/splunk/SavedSearchTest.java index e62a6294..86b4d697 100644 --- a/splunk/src/test/java/com/splunk/SavedSearchTest.java +++ b/splunk/src/test/java/com/splunk/SavedSearchTest.java @@ -480,7 +480,7 @@ public void testHistoryWithArgs(){ Assert.assertEquals(30, savedSearch.history().length); //history with argument 'count' set to '0' i.e it returns the whole history - HashMap<String, Object> args = new HashMap<String, Object>(); + HashMap<String, Object> args = new HashMap<>(); args.put("count", 0); Assert.assertEquals(31, savedSearch.history(args).length); diff --git a/splunk/src/test/java/com/splunk/SearchJobTest.java b/splunk/src/test/java/com/splunk/SearchJobTest.java index fbb7cbce..3169172d 100644 --- a/splunk/src/test/java/com/splunk/SearchJobTest.java +++ b/splunk/src/test/java/com/splunk/SearchJobTest.java @@ -231,7 +231,7 @@ public void testJobHasNoSgByDefault() throws IOException { public void testJobCanEnableSg() throws IOException { Job job = service.getJobs().create("search index=_internal GET | head 3"); waitUntilDone(job); - Map<String, String> args = new HashMap<String, String>(); + Map<String, String> args = new HashMap<>(); args.put("segmentation", "raw"); String data = streamToString(job.getResults(args)); Assert.assertTrue(data.contains("<sg")); @@ -1031,15 +1031,14 @@ public boolean predicate() { private String inputStreamToString(InputStream stream) { try { StringBuilder b = new StringBuilder(); - BufferedReader reader = new BufferedReader( - new InputStreamReader(stream, "UTF-8") - ); - String tmp; - while ((tmp = reader.readLine()) != null) { - b.append(tmp + "\n"); + try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8"));) { + String tmp; + while ((tmp = reader.readLine()) != null) { + b.append(tmp + "\n"); + } + return b.toString(); } - return b.toString(); } catch (IOException e) { Assert.fail(e.toString()); return null; diff 
--git a/splunk/src/test/java/com/splunk/ServiceTest.java b/splunk/src/test/java/com/splunk/ServiceTest.java index ac8076ad..74427442 100644 --- a/splunk/src/test/java/com/splunk/ServiceTest.java +++ b/splunk/src/test/java/com/splunk/ServiceTest.java @@ -155,7 +155,7 @@ public void testServiceWithCustomHeaders() { args.setScheme((String) command.opts.get("scheme")); args.setUsername((String) command.opts.get("username")); args.setPassword((String) command.opts.get("password")); - args.setHttpHeaders(new HashMap<String, String>() {{ + args.setHttpHeaders(new HashMap<>() {{ put("some header key", "some value"); }}); Service service = new Service(args); @@ -706,7 +706,7 @@ public void testDelete() { @Test public void testPost() { - HashMap<String, Object> args = new HashMap<String, Object>(); + HashMap<String, Object> args = new HashMap<>(); args.put("foo", "bar"); ResponseMessage response; diff --git a/splunk/src/test/java/com/splunk/UtilTest.java b/splunk/src/test/java/com/splunk/UtilTest.java index 0634e8c1..5f245bb8 100644 --- a/splunk/src/test/java/com/splunk/UtilTest.java +++ b/splunk/src/test/java/com/splunk/UtilTest.java @@ -25,14 +25,14 @@ public class UtilTest extends SDKTestCase { @Test public void testJoin() { - List<String> emptyList = new ArrayList<String>(); + List<String> emptyList = new ArrayList<>(); Assert.assertEquals("", Util.join("/", emptyList)); - List<String> oneElementList = new ArrayList<String>(); + List<String> oneElementList = new ArrayList<>(); oneElementList.add("abcd"); Assert.assertEquals("abcd", Util.join("/", oneElementList)); - List<String> fullList = new ArrayList<String>(); + List<String> fullList = new ArrayList<>(); fullList.add("abcd"); fullList.add("defg"); Assert.assertEquals( diff --git a/splunk/src/test/java/com/splunk/modularinput/InputDefinitionTest.java b/splunk/src/test/java/com/splunk/modularinput/InputDefinitionTest.java index aa9fe324..60131230 100644 --- 
a/splunk/src/test/java/com/splunk/modularinput/InputDefinitionTest.java +++ b/splunk/src/test/java/com/splunk/modularinput/InputDefinitionTest.java @@ -45,14 +45,14 @@ public void testParseStreamWithThreeInputs() throws ParserConfigurationException expectedDefinition.setCheckpointDir("/some/dir"); expectedDefinition.setSessionKey("123102983109283019283"); - List<Parameter> parameters = new ArrayList<Parameter>(); + List<Parameter> parameters = new ArrayList<>(); parameters.add(new SingleValueParameter("param1", "value1")); parameters.add(new SingleValueParameter("param2", "value2")); parameters.add(new SingleValueParameter("disabled", "0")); parameters.add(new SingleValueParameter("index", "default")); expectedDefinition.addInput("foobar://aaa", parameters); - parameters = new ArrayList<Parameter>(); + parameters = new ArrayList<>(); parameters.add(new SingleValueParameter("param1", "value11")); parameters.add(new SingleValueParameter("param2", "value22")); parameters.add(new SingleValueParameter("disabled", "0")); @@ -78,8 +78,7 @@ public void testParseStreamWithThreeInputs() throws ParserConfigurationException */ @Test public void testParseMalformedInputDefinition() throws ParserConfigurationException, SAXException, IOException { - try { - InputStream stream = SDKTestCase.openResource("/modularinput/data/conf_with_invalid_inputs.xml"); + try (InputStream stream = SDKTestCase.openResource("/modularinput/data/conf_with_invalid_inputs.xml")) { InputDefinition foundDefinition = InputDefinition.parseDefinition(stream); } catch (MalformedDataException e) { Assert.assertTrue(true); diff --git a/splunk/src/test/java/com/splunk/modularinput/ModularInputTestCase.java b/splunk/src/test/java/com/splunk/modularinput/ModularInputTestCase.java index ef0a7c92..b8abfd9d 100644 --- a/splunk/src/test/java/com/splunk/modularinput/ModularInputTestCase.java +++ b/splunk/src/test/java/com/splunk/modularinput/ModularInputTestCase.java @@ -33,11 +33,11 @@ public static void 
removeBlankTextNodes(Element node) { // Iterate backwards through the collection since we're going to be removing elements for (int i = children.getLength() - 1; i >= 0; i--) { Node child = children.item(i); - if (child instanceof Text && ((Text)child).getData().trim().length() == 0) { + if (child instanceof Text txt && txt.getData().isBlank()) { node.removeChild(child); } - else if (child instanceof Element) { - removeBlankTextNodes((Element) child); + else if (child instanceof Element elem) { + removeBlankTextNodes(elem); } } } @@ -48,8 +48,8 @@ else if (child instanceof Element) { * * @param expected an org.w3c.dom.Node object containing the expected XML document. * @param found an org.w3c.dom.Node object containing the XML document actually produced. - * @throws javax.xml.transform.TransformerException - * @throws javax.xml.parsers.ParserConfigurationException + * @throws TransformerException + * @throws ParserConfigurationException */ public void assertXmlEqual(Node expected, Node found) throws TransformerException, ParserConfigurationException { try { @@ -79,8 +79,8 @@ public void assertXmlEqual(Node expected, Node found) throws TransformerExceptio * * @param expected an org.w3c.dom.Document object containing the expected XML document. * @param found an org.w3c.dom.Document object containing the XML document actually produced. 
- * @throws javax.xml.transform.TransformerException - * @throws javax.xml.parsers.ParserConfigurationException + * @throws TransformerException + * @throws ParserConfigurationException */ public void assertXmlEqual(Document expected, Document found) throws TransformerException, ParserConfigurationException { removeBlankTextNodes(expected.getDocumentElement()); @@ -107,8 +107,7 @@ public Document resourceToXmlDocument(String path) { throw new AssertionError("Parser configuration failed: " + e.toString()); } - InputStream resource = SDKTestCase.openResource(path); - try { + try (InputStream resource = SDKTestCase.openResource(path)) { Document doc = documentBuilder.parse(resource); return doc; } catch (SAXException e) { diff --git a/splunk/src/test/java/com/splunk/modularinput/ValidationDefinitionTest.java b/splunk/src/test/java/com/splunk/modularinput/ValidationDefinitionTest.java index 6110c04e..a515a13e 100644 --- a/splunk/src/test/java/com/splunk/modularinput/ValidationDefinitionTest.java +++ b/splunk/src/test/java/com/splunk/modularinput/ValidationDefinitionTest.java @@ -30,7 +30,7 @@ public void testParseValidationDefinition() throws ParserConfigurationException, expected.setCheckpointDir("/opt/splunk/var/lib/splunk/modinputs"); expected.setSessionKey("123102983109283019283"); expected.setName("aaa"); - List<Parameter> parameters = new ArrayList<Parameter>(); + List<Parameter> parameters = new ArrayList<>(); parameters.add(new SingleValueParameter("param1", "value1")); parameters.add(new SingleValueParameter("param2", "value2")); parameters.add(new SingleValueParameter("disabled", "0")); diff --git a/splunk/src/test/java/com/splunk/splunk.license.xml b/splunk/src/test/java/com/splunk/splunk.license.xml new file mode 100644 index 00000000..23f09657 --- /dev/null +++ b/splunk/src/test/java/com/splunk/splunk.license.xml @@ -0,0 +1,33 @@ +<?xml version="1.0" encoding="UTF-8"?> +<license> + 
<signature>MF5kKJJZo5PFE9A8YIxMo5L+7kIHzFkGMweNLaLXbiNI2OdP9ZazXDcC8/ioswEVXA2IWj14q6TKK9gQB+2VO5TakyFF45QgaWGJwuGtq6hYVIPiwMSY7SnlHFys30nzE7cQ0V1EwMDL03pMvM3YzbvbpNHcm92riGoAtj01QMbBm1KE/oxdR+XiYnFqFNNlUMVLwtygarUdaJNmy15lOTQdHsvW7Cf3F7gKb12NPUi3xBg1PApsc3O3pfQYsFTzXrJC+Fwfrx5P/0ha+bwrSN3idmGiIg1FiPVoTIiQdxyRTFhKYV6ly2eDPSxj4TnAwnp8HDBNNqHOZ9LYpRF/kA==</signature> + <payload> + <type>enterprise</type> + <group_id>Enterprise</group_id> + <quota>1048576</quota> + <max_violations>5</max_violations> + <window_period>30</window_period> + <creation_time>1321914737</creation_time> + <label>Splunk Developer Personal License NOT FOR RESALE</label> + <expiration_time>9961828337</expiration_time> + <features> + <feature>Auth</feature> + <feature>FwdData</feature> + <feature>RcvData</feature> + <feature>LocalSearch</feature> + <feature>DistSearch</feature> + <feature>RcvSearch</feature> + <feature>ScheduledSearch</feature> + <feature>Alerting</feature> + <feature>DeployClient</feature> + <feature>DeployServer</feature> + <feature>SplunkWeb</feature> + <feature>SigningProcessor</feature> + <feature>SyslogOutputProcessor</feature> + <feature>AllowDuplicateKeys</feature> + <feature>CanBeRemoteMaster</feature> + </features> + <sourcetypes/> + <guid>AD1D800A-04CC-4ED2-9754-85335ECEA6E3</guid> + </payload> +</license> \ No newline at end of file diff --git a/splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml b/splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml new file mode 100644 index 00000000..51828bbd --- /dev/null +++ b/splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml @@ -0,0 +1,33 @@ +<?xml version="1.0" encoding="UTF-8"?> +<license> + 
<signature>dTEDyZNMuiTE7Nu1Y3ZUr8l9pbGaX/RRo/eAhsGBHuo23VyYzaiyHnPHyr8IPL7d3opT/4EBfrCNI4HGwN1EOdiES9mt91fDQvEFx8f+p+RJnlFJfRQqbbWZWWjqxfJvYIlBc9GDiZ0xSo+Bc+DuYdlkG4f+WywHIH/k9HN6snqCxVojRJwiJAGjDn5FmUVIcaKCF84MaRVFPmRlicNzQ6pYvQjimPaUHNoP5NB9rgWg4ehaZ+bfR2AcjpgSBNiJOwKRI9EdNLA7GPXphcujyKynl45RSWZmqd5vQMHIH4a2BTeK+0QwPoJni4CUtN19yUFUNOiXHjx6Aunjw9qVtA==</signature> + <payload> + <type>enterprise</type> + <group_id>Enterprise</group_id> + <quota>104857600</quota> + <max_violations>5</max_violations> + <window_period>30</window_period> + <creation_time>1397458800</creation_time> + <label>Splunk Internal License DO NOT DISTRIBUTE</label> + <expiration_time>1397804400</expiration_time> + <features> + <feature>Auth</feature> + <feature>FwdData</feature> + <feature>RcvData</feature> + <feature>LocalSearch</feature> + <feature>DistSearch</feature> + <feature>RcvSearch</feature> + <feature>ScheduledSearch</feature> + <feature>Alerting</feature> + <feature>DeployClient</feature> + <feature>DeployServer</feature> + <feature>SplunkWeb</feature> + <feature>SigningProcessor</feature> + <feature>SyslogOutputProcessor</feature> + <feature>AllowDuplicateKeys</feature> + <feature>CanBeRemoteMaster</feature> + </features> + <sourcetypes/> + <guid>C759B91D-3D25-438F-95CA-092C512DFBEC</guid> + </payload> +</license> \ No newline at end of file From 76ceb1991e0423a178890ac5838eccfd7f29d7f3 Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Thu, 26 Oct 2023 14:51:18 +0530 Subject: [PATCH 08/12] removing files added by mistake --- .../test/java/com/splunk/splunk.license.xml | 33 ------------------- .../splunk_at_least_cupcake.license.xml | 33 ------------------- 2 files changed, 66 deletions(-) delete mode 100644 splunk/src/test/java/com/splunk/splunk.license.xml delete mode 100644 splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml diff --git a/splunk/src/test/java/com/splunk/splunk.license.xml b/splunk/src/test/java/com/splunk/splunk.license.xml deleted 
file mode 100644 index 23f09657..00000000 --- a/splunk/src/test/java/com/splunk/splunk.license.xml +++ /dev/null @@ -1,33 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<license> - <signature>MF5kKJJZo5PFE9A8YIxMo5L+7kIHzFkGMweNLaLXbiNI2OdP9ZazXDcC8/ioswEVXA2IWj14q6TKK9gQB+2VO5TakyFF45QgaWGJwuGtq6hYVIPiwMSY7SnlHFys30nzE7cQ0V1EwMDL03pMvM3YzbvbpNHcm92riGoAtj01QMbBm1KE/oxdR+XiYnFqFNNlUMVLwtygarUdaJNmy15lOTQdHsvW7Cf3F7gKb12NPUi3xBg1PApsc3O3pfQYsFTzXrJC+Fwfrx5P/0ha+bwrSN3idmGiIg1FiPVoTIiQdxyRTFhKYV6ly2eDPSxj4TnAwnp8HDBNNqHOZ9LYpRF/kA==</signature> - <payload> - <type>enterprise</type> - <group_id>Enterprise</group_id> - <quota>1048576</quota> - <max_violations>5</max_violations> - <window_period>30</window_period> - <creation_time>1321914737</creation_time> - <label>Splunk Developer Personal License NOT FOR RESALE</label> - <expiration_time>9961828337</expiration_time> - <features> - <feature>Auth</feature> - <feature>FwdData</feature> - <feature>RcvData</feature> - <feature>LocalSearch</feature> - <feature>DistSearch</feature> - <feature>RcvSearch</feature> - <feature>ScheduledSearch</feature> - <feature>Alerting</feature> - <feature>DeployClient</feature> - <feature>DeployServer</feature> - <feature>SplunkWeb</feature> - <feature>SigningProcessor</feature> - <feature>SyslogOutputProcessor</feature> - <feature>AllowDuplicateKeys</feature> - <feature>CanBeRemoteMaster</feature> - </features> - <sourcetypes/> - <guid>AD1D800A-04CC-4ED2-9754-85335ECEA6E3</guid> - </payload> -</license> \ No newline at end of file diff --git a/splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml b/splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml deleted file mode 100644 index 51828bbd..00000000 --- a/splunk/src/test/java/com/splunk/splunk_at_least_cupcake.license.xml +++ /dev/null @@ -1,33 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<license> - 
<signature>dTEDyZNMuiTE7Nu1Y3ZUr8l9pbGaX/RRo/eAhsGBHuo23VyYzaiyHnPHyr8IPL7d3opT/4EBfrCNI4HGwN1EOdiES9mt91fDQvEFx8f+p+RJnlFJfRQqbbWZWWjqxfJvYIlBc9GDiZ0xSo+Bc+DuYdlkG4f+WywHIH/k9HN6snqCxVojRJwiJAGjDn5FmUVIcaKCF84MaRVFPmRlicNzQ6pYvQjimPaUHNoP5NB9rgWg4ehaZ+bfR2AcjpgSBNiJOwKRI9EdNLA7GPXphcujyKynl45RSWZmqd5vQMHIH4a2BTeK+0QwPoJni4CUtN19yUFUNOiXHjx6Aunjw9qVtA==</signature> - <payload> - <type>enterprise</type> - <group_id>Enterprise</group_id> - <quota>104857600</quota> - <max_violations>5</max_violations> - <window_period>30</window_period> - <creation_time>1397458800</creation_time> - <label>Splunk Internal License DO NOT DISTRIBUTE</label> - <expiration_time>1397804400</expiration_time> - <features> - <feature>Auth</feature> - <feature>FwdData</feature> - <feature>RcvData</feature> - <feature>LocalSearch</feature> - <feature>DistSearch</feature> - <feature>RcvSearch</feature> - <feature>ScheduledSearch</feature> - <feature>Alerting</feature> - <feature>DeployClient</feature> - <feature>DeployServer</feature> - <feature>SplunkWeb</feature> - <feature>SigningProcessor</feature> - <feature>SyslogOutputProcessor</feature> - <feature>AllowDuplicateKeys</feature> - <feature>CanBeRemoteMaster</feature> - </features> - <sourcetypes/> - <guid>C759B91D-3D25-438F-95CA-092C512DFBEC</guid> - </payload> -</license> \ No newline at end of file From 5ccfc9b28c7bfc47ea0329d89609fc86328db552 Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Fri, 27 Oct 2023 17:23:51 +0530 Subject: [PATCH 09/12] Sonarlint fixes --- splunk/src/main/java/com/splunk/AtomFeed.java | 1 - splunk/src/main/java/com/splunk/CollectionArgs.java | 4 ++-- splunk/src/main/java/com/splunk/Command.java | 5 +++-- splunk/src/main/java/com/splunk/DataModel.java | 4 ++-- splunk/src/main/java/com/splunk/DataModelField.java | 2 +- splunk/src/main/java/com/splunk/FieldType.java | 2 +- splunk/src/main/java/com/splunk/ResultsReader.java | 3 +-- .../main/java/com/splunk/modularinput/Argument.java | 2 +- 
.../main/java/com/splunk/modularinput/EventWriter.java | 6 ++++-- .../src/main/java/com/splunk/modularinput/Scheme.java | 2 +- splunk/src/test/java/com/splunk/ApplicationTest.java | 4 ++-- splunk/src/test/java/com/splunk/EntityTest.java | 2 +- splunk/src/test/java/com/splunk/HttpServiceTest.java | 6 +++--- splunk/src/test/java/com/splunk/IndexTest.java | 2 +- splunk/src/test/java/com/splunk/InputCrudTest.java | 2 +- splunk/src/test/java/com/splunk/NamespaceTest.java | 8 ++------ splunk/src/test/java/com/splunk/OutputDefaultTest.java | 2 +- splunk/src/test/java/com/splunk/PasswordTest.java | 4 ++-- splunk/src/test/java/com/splunk/ResultsReaderTest.java | 8 ++++---- splunk/src/test/java/com/splunk/SavedSearchTest.java | 6 +++--- splunk/src/test/java/com/splunk/SearchJobTest.java | 10 +++++----- splunk/src/test/java/com/splunk/ServiceTest.java | 4 ++-- 22 files changed, 43 insertions(+), 46 deletions(-) diff --git a/splunk/src/main/java/com/splunk/AtomFeed.java b/splunk/src/main/java/com/splunk/AtomFeed.java index 2cacb86e..3de9b75c 100644 --- a/splunk/src/main/java/com/splunk/AtomFeed.java +++ b/splunk/src/main/java/com/splunk/AtomFeed.java @@ -20,7 +20,6 @@ import java.util.*; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; -import javax.xml.stream.XMLStreamConstants; /** * The {@code AtomFeed} class represents an Atom feed. diff --git a/splunk/src/main/java/com/splunk/CollectionArgs.java b/splunk/src/main/java/com/splunk/CollectionArgs.java index d9c32b2f..741ad4d3 100644 --- a/splunk/src/main/java/com/splunk/CollectionArgs.java +++ b/splunk/src/main/java/com/splunk/CollectionArgs.java @@ -26,7 +26,7 @@ public class CollectionArgs extends Args { /** * Indicates whether to sort entries in ascending or descending order. */ - public static enum SortDirection { + public enum SortDirection { /** Sort entries in ascending order. */ ASC("asc"), /** Sort entries in descending order. 
*/ @@ -49,7 +49,7 @@ public String toString() { /** * Indicates the sorting mode for entries. */ - public static enum SortMode { + public enum SortMode { /** * If all values of the field are numbers, collate numerically. * Otherwise, collate alphabetically. diff --git a/splunk/src/main/java/com/splunk/Command.java b/splunk/src/main/java/com/splunk/Command.java index c37a8d03..aaee5d9c 100644 --- a/splunk/src/main/java/com/splunk/Command.java +++ b/splunk/src/main/java/com/splunk/Command.java @@ -18,7 +18,6 @@ import java.io.BufferedReader; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; @@ -190,7 +189,9 @@ else if (type == Integer.class) { java.lang.reflect.Field field = this.getClass().getField(name); field.set(this, value); } - catch (NoSuchFieldException e) { continue; } + catch (NoSuchFieldException e) { + continue; + } catch (IllegalAccessException e) { throw new RuntimeException(e.getMessage(), e); } diff --git a/splunk/src/main/java/com/splunk/DataModel.java b/splunk/src/main/java/com/splunk/DataModel.java index 028e13e3..82a4c13b 100644 --- a/splunk/src/main/java/com/splunk/DataModel.java +++ b/splunk/src/main/java/com/splunk/DataModel.java @@ -28,8 +28,8 @@ * data model objects, which specify structured views on Splunk data. 
*/ public class DataModel extends Entity { - private final static JsonParser jsonParser = new JsonParser(); - private final static Gson gson = new Gson(); + private static final JsonParser jsonParser = new JsonParser(); + private static final Gson gson = new Gson(); private static final String ACCELERATION_LABEL = "acceleration"; private static final String MODEL_NAME_LABEL = "modelName"; diff --git a/splunk/src/main/java/com/splunk/DataModelField.java b/splunk/src/main/java/com/splunk/DataModelField.java index d83dca1c..3487f80e 100644 --- a/splunk/src/main/java/com/splunk/DataModelField.java +++ b/splunk/src/main/java/com/splunk/DataModelField.java @@ -44,7 +44,7 @@ private DataModelField() {} /** * @return The name of this field. */ - public String getName() { return this.name; }; + public String getName() { return this.name; } /** * Return the name of the data model object on which this field is defined. That need not diff --git a/splunk/src/main/java/com/splunk/FieldType.java b/splunk/src/main/java/com/splunk/FieldType.java index e6bc6b2c..ff0253b0 100644 --- a/splunk/src/main/java/com/splunk/FieldType.java +++ b/splunk/src/main/java/com/splunk/FieldType.java @@ -50,7 +50,7 @@ public String toString() { } }; - private final static Map<String, FieldType> typeLookup = new HashMap<>() {{ + private static final Map<String, FieldType> typeLookup = new HashMap<>() {{ put("string", STRING); put("number", NUMBER); put("boolean", BOOLEAN); diff --git a/splunk/src/main/java/com/splunk/ResultsReader.java b/splunk/src/main/java/com/splunk/ResultsReader.java index d2c60777..a0f8a8eb 100644 --- a/splunk/src/main/java/com/splunk/ResultsReader.java +++ b/splunk/src/main/java/com/splunk/ResultsReader.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.InputStreamReader; import java.util.Iterator; /** @@ -60,7 +59,7 @@ public void close() throws IOException { * {@link Event} class to interpret multi-item values. 
* @throws IOException On IO exception. */ - final public Event getNextEvent() throws IOException { + public final Event getNextEvent() throws IOException { return getNextElement(); } diff --git a/splunk/src/main/java/com/splunk/modularinput/Argument.java b/splunk/src/main/java/com/splunk/modularinput/Argument.java index 1a171a60..88b4b776 100755 --- a/splunk/src/main/java/com/splunk/modularinput/Argument.java +++ b/splunk/src/main/java/com/splunk/modularinput/Argument.java @@ -31,7 +31,7 @@ public class Argument { private static DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - public enum DataType { BOOLEAN, NUMBER, STRING }; + public enum DataType { BOOLEAN, NUMBER, STRING } // Name used to identify this argument in Splunk. protected String name; diff --git a/splunk/src/main/java/com/splunk/modularinput/EventWriter.java b/splunk/src/main/java/com/splunk/modularinput/EventWriter.java index c6ce3425..3a8f9449 100755 --- a/splunk/src/main/java/com/splunk/modularinput/EventWriter.java +++ b/splunk/src/main/java/com/splunk/modularinput/EventWriter.java @@ -21,11 +21,13 @@ import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; -import javax.xml.transform.*; +import javax.xml.transform.OutputKeys; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.*; -import java.util.logging.Level; /** * The {@code EventWriter} class encapsulates writing events and error messages to Splunk from a modular input. 
diff --git a/splunk/src/main/java/com/splunk/modularinput/Scheme.java b/splunk/src/main/java/com/splunk/modularinput/Scheme.java index dbc99ce6..1ef388b1 100644 --- a/splunk/src/main/java/com/splunk/modularinput/Scheme.java +++ b/splunk/src/main/java/com/splunk/modularinput/Scheme.java @@ -37,7 +37,7 @@ public class Scheme { private static DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); - public enum StreamingMode { SIMPLE, XML }; + public enum StreamingMode { SIMPLE, XML } // Name of this module input kind. <tt>title</tt> will be used as the URL scheme when // specifying particular modular inputs. For example, if <tt>title</tt> is <tt>"abc"</tt>, diff --git a/splunk/src/test/java/com/splunk/ApplicationTest.java b/splunk/src/test/java/com/splunk/ApplicationTest.java index 80c5b1d9..4094d09f 100644 --- a/splunk/src/test/java/com/splunk/ApplicationTest.java +++ b/splunk/src/test/java/com/splunk/ApplicationTest.java @@ -95,7 +95,7 @@ public void testForSetupPresent() throws Exception { new ByteArrayInputStream(setupXml.getBytes("UTF-8"))); parsedSetupXml.getDocumentElement().normalize(); - Assert.assertEquals(parsedSetupXml.getDocumentElement().getNodeName(), "SetupInfo"); + Assert.assertEquals("SetupInfo", parsedSetupXml.getDocumentElement().getNodeName()); NodeList blocks = parsedSetupXml.getDocumentElement().getElementsByTagName("block"); Assert.assertEquals(1, blocks.getLength()); @@ -113,7 +113,7 @@ public void testArchive() { Assert.assertTrue(filePath.endsWith(applicationName + ".spl")); } Assert.assertFalse(archive.getRefresh()); - Assert.assertTrue(archive.getUrl() != null); + Assert.assertNotNull(archive.getUrl()); } @Test diff --git a/splunk/src/test/java/com/splunk/EntityTest.java b/splunk/src/test/java/com/splunk/EntityTest.java index 4760685f..5eaa2063 100644 --- a/splunk/src/test/java/com/splunk/EntityTest.java +++ b/splunk/src/test/java/com/splunk/EntityTest.java @@ -112,6 +112,6 @@ public void 
testResourceCollection() { Assert.assertTrue(indexes.equals(indexes.items)); Assert.assertTrue(indexes.hashCode() != 0); Assert.assertTrue(indexes.keySet().contains("main")); - Assert.assertTrue(indexes.valueSize("main") == 1); + Assert.assertEquals(1, indexes.valueSize("main")); } } diff --git a/splunk/src/test/java/com/splunk/HttpServiceTest.java b/splunk/src/test/java/com/splunk/HttpServiceTest.java index 199f7863..231d4842 100644 --- a/splunk/src/test/java/com/splunk/HttpServiceTest.java +++ b/splunk/src/test/java/com/splunk/HttpServiceTest.java @@ -85,7 +85,7 @@ public void testRequestMessage() { Assert.assertTrue(request.checkMethod(request.getMethod())); request.setMethod("POST"); Assert.assertTrue(request.checkMethod(request.getMethod())); - Assert.assertEquals(request.getMethod(), "POST"); + Assert.assertEquals("POST", request.getMethod()); ByteArrayOutputStream stream = new ByteArrayOutputStream(); try { @@ -102,8 +102,8 @@ public void testRequestMessage() { @Test public void testResponseMessage() { ResponseMessage response = new ResponseMessage(200); - Assert.assertEquals(response.getStatus(), 200); - Assert.assertTrue(response.getHeader() != null); + Assert.assertEquals(200, response.getStatus()); + Assert.assertNotNull(response.getHeader()); } @Test(expected = IllegalArgumentException.class) diff --git a/splunk/src/test/java/com/splunk/IndexTest.java b/splunk/src/test/java/com/splunk/IndexTest.java index cd6f06a8..00e9696f 100644 --- a/splunk/src/test/java/com/splunk/IndexTest.java +++ b/splunk/src/test/java/com/splunk/IndexTest.java @@ -757,7 +757,7 @@ public void testUploadArgsFailure() throws Exception{ Assert.fail("Uploading to an index with an index argument? 
No need for redundancy!"); } catch(Exception e){ - Assert.assertEquals(e.getMessage(), "The 'index' parameter cannot be passed to an index's oneshot upload."); + Assert.assertEquals("The 'index' parameter cannot be passed to an index's oneshot upload.", e.getMessage()); } } diff --git a/splunk/src/test/java/com/splunk/InputCrudTest.java b/splunk/src/test/java/com/splunk/InputCrudTest.java index d0d3a1f7..79d7a0b0 100644 --- a/splunk/src/test/java/com/splunk/InputCrudTest.java +++ b/splunk/src/test/java/com/splunk/InputCrudTest.java @@ -521,7 +521,7 @@ public void testWindowsPerfmonInputCrud() { Assert.assertEquals(1, windowsPerfmonInput.getCounters().length); Assert.assertTrue(contains(windowsPerfmonInput.getCounters(), "% Privileged Time")); - Assert.assertEquals(windowsPerfmonInput.getIndex(), "main"); + Assert.assertEquals("main", windowsPerfmonInput.getIndex()); Assert.assertTrue(contains(windowsPerfmonInput.getInstances(), "wininit")); Assert.assertEquals(1200, windowsPerfmonInput.getInterval()); Assert.assertEquals("Process", windowsPerfmonInput.getObject()); diff --git a/splunk/src/test/java/com/splunk/NamespaceTest.java b/splunk/src/test/java/com/splunk/NamespaceTest.java index 16a332ad..40e88979 100644 --- a/splunk/src/test/java/com/splunk/NamespaceTest.java +++ b/splunk/src/test/java/com/splunk/NamespaceTest.java @@ -264,12 +264,8 @@ public void testNamespaceConflicts() { Assert.assertEquals(query1, service.getSavedSearches(namespace1).get(savedSearchName).getSearch()); Assert.assertEquals(query2, service.getSavedSearches(namespace2).get(savedSearchName).getSearch()); - try { - service.getSavedSearches(wildcardNamespace).get(savedSearchName).getSearch(); - Assert.fail("Expected SplunkException about multiple keys."); - } catch (SplunkException e) { - - } + service.getSavedSearches(wildcardNamespace).get(savedSearchName).getSearch(); + Assert.fail("Expected SplunkException about multiple keys."); } finally { if 
(service.getSavedSearches(namespace1).containsKey(savedSearchName)) { service.getSavedSearches(namespace1).remove(savedSearchName); diff --git a/splunk/src/test/java/com/splunk/OutputDefaultTest.java b/splunk/src/test/java/com/splunk/OutputDefaultTest.java index 55f28389..93b3bb4f 100644 --- a/splunk/src/test/java/com/splunk/OutputDefaultTest.java +++ b/splunk/src/test/java/com/splunk/OutputDefaultTest.java @@ -45,7 +45,7 @@ public void testOutputDefault() throws Exception { // Probe { outputDefault.setMaxQueueSize("1MB"); - Assert.assertEquals(outputDefault.getMaxQueueSize(), "1MB"); + Assert.assertEquals("1MB", outputDefault.getMaxQueueSize()); outputDefault.setMaxQueueSize(maxQueueSize); Assert.assertEquals(outputDefault.getMaxQueueSize(), maxQueueSize); diff --git a/splunk/src/test/java/com/splunk/PasswordTest.java b/splunk/src/test/java/com/splunk/PasswordTest.java index fb03f264..5bb46ae7 100644 --- a/splunk/src/test/java/com/splunk/PasswordTest.java +++ b/splunk/src/test/java/com/splunk/PasswordTest.java @@ -141,7 +141,7 @@ public void testPasswordsWithWildCards(){ args.put("password", "changed!"); Service service = Service.connect(args); PasswordCollection passwords = service.getPasswords(); - Assert.assertEquals(passwords.size(),0); + Assert.assertEquals(0, passwords.size()); String name = "no-owner"; String value = "sdk-test-password"; @@ -171,6 +171,6 @@ public void testPasswordsWithWildCards(){ Assert.assertEquals("app context must be specified when removing a password.", e.getMessage()); } passwords = service.getPasswords(); - Assert.assertEquals(passwords.size(),0); + Assert.assertEquals(0, passwords.size()); } } diff --git a/splunk/src/test/java/com/splunk/ResultsReaderTest.java b/splunk/src/test/java/com/splunk/ResultsReaderTest.java index 6d346984..b0e69c4f 100644 --- a/splunk/src/test/java/com/splunk/ResultsReaderTest.java +++ b/splunk/src/test/java/com/splunk/ResultsReaderTest.java @@ -341,7 +341,7 @@ private void 
testPreviewSingleReaderXml(boolean useIter) throws Exception { String[] fieldNameArray = new String[0]; fieldNameArray = reader.getFields().toArray(fieldNameArray); Assert.assertEquals(101, fieldNameArray.length); - Assert.assertEquals(fieldNameArray[99], "useragent"); + Assert.assertEquals("useragent", fieldNameArray[99]); int index = 0; Event lastEvent = null; @@ -456,13 +456,13 @@ private void testExportMultiReader( switch (indexResultSet) { case 0: - Assert.assertEquals(indexEvent, 1); + Assert.assertEquals(1, indexEvent); break; case 1: - Assert.assertEquals(indexEvent, 3); + Assert.assertEquals(3, indexEvent); break; default: - Assert.assertEquals(indexEvent, 5); + Assert.assertEquals(5, indexEvent); break; } indexResultSet++; diff --git a/splunk/src/test/java/com/splunk/SavedSearchTest.java b/splunk/src/test/java/com/splunk/SavedSearchTest.java index 86b4d697..a96f5d46 100644 --- a/splunk/src/test/java/com/splunk/SavedSearchTest.java +++ b/splunk/src/test/java/com/splunk/SavedSearchTest.java @@ -246,7 +246,7 @@ public void testUpdate() { boolean isPre620 = service.versionIsEarlierThan("6.2.0"); try { - Assert.assertEquals(savedSearch.isEmbedEnabled(), false); + Assert.assertEquals(false, savedSearch.isEmbedEnabled()); Assert.assertNull(savedSearch.getEmbedToken()); if (isPre620) Assert.fail("Expected UnsupportedOperationException"); @@ -373,8 +373,8 @@ public void testACLUpdates(){ args.add("perms.read","admin, nobody"); savedSearch.aclUpdate(args); aclInfo = savedSearch.getMetadata().getEaiAcl(); - Assert.assertEquals(aclInfo.getString("sharing"), "app"); - Assert.assertEquals(aclInfo.getString("owner"), "nobody"); + Assert.assertEquals("app", aclInfo.getString("sharing")); + Assert.assertEquals("nobody", aclInfo.getString("owner")); Assert.assertNotNull(aclInfo.get("perms")); } diff --git a/splunk/src/test/java/com/splunk/SearchJobTest.java b/splunk/src/test/java/com/splunk/SearchJobTest.java index 3169172d..74434390 100644 --- 
a/splunk/src/test/java/com/splunk/SearchJobTest.java +++ b/splunk/src/test/java/com/splunk/SearchJobTest.java @@ -340,7 +340,7 @@ public void testEventArgs(Job job) throws IOException, InterruptedException { while(true) { HashMap<String, String> found = reader.getNextEvent(); if (found != null) { - Assert.assertEquals(found.get("_raw").split("\n").length, 1); + Assert.assertEquals(1, found.get("_raw").split("\n").length); Assert.assertFalse(found.containsKey("date_month")); Assert.assertEquals(Integer.parseInt(found.get("_serial")), count + 2); count++; @@ -367,7 +367,7 @@ public void testResultArgs(Job job) throws IOException, InterruptedException { while(true) { HashMap<String, String> found = reader.getNextEvent(); if (found != null) { - Assert.assertEquals(found.get("_raw").split("\n").length, 1); + Assert.assertEquals(1, found.get("_raw").split("\n").length); Assert.assertFalse(found.containsKey("date_month")); Assert.assertEquals(Integer.parseInt(found.get("_serial")), count + 2); count++; @@ -390,7 +390,7 @@ public void testResultArgs(Job job) throws IOException, InterruptedException { while(true) { HashMap<String, String> found = reader2.getNextEvent(); if (found != null) { - Assert.assertEquals(found.get("count"), "10"); + Assert.assertEquals("10", found.get("count")); count2++; } else { @@ -415,7 +415,7 @@ public void testPreviewArgs(Job job) throws IOException, InterruptedException { while (true) { HashMap<String, String> found = reader.getNextEvent(); if (found != null) { - Assert.assertEquals(found.get("_raw").split("\n").length, 1); + Assert.assertEquals(1, found.get("_raw").split("\n").length); Assert.assertFalse(found.containsKey("date_month")); Assert.assertEquals(Integer.parseInt(found.get("_serial")), count + 2); count++; @@ -438,7 +438,7 @@ public void testPreviewArgs(Job job) throws IOException, InterruptedException { while(true) { HashMap<String, String> found = reader2.getNextEvent(); if (found != null) { - 
Assert.assertEquals(found.get("count"), "10"); + Assert.assertEquals("10", found.get("count")); count2++; } else { diff --git a/splunk/src/test/java/com/splunk/ServiceTest.java b/splunk/src/test/java/com/splunk/ServiceTest.java index 74427442..d9efa699 100644 --- a/splunk/src/test/java/com/splunk/ServiceTest.java +++ b/splunk/src/test/java/com/splunk/ServiceTest.java @@ -160,7 +160,7 @@ public void testServiceWithCustomHeaders() { }}); Service service = new Service(args); Map<String, String> customHeaders = service.getCustomHeaders(); - Assert.assertEquals(customHeaders.get("some header key"), "some value"); + Assert.assertEquals("some value", customHeaders.get("some header key")); } @Test @@ -522,7 +522,7 @@ public void testNewServiceArgs() { args.setToken("Splunk MY_SESSION_KEY"); Assert.assertEquals("Arg setters didn't replicate value to deprecated fields.", - args.app, "myapp"); + "myapp", args.app); Service service = new Service(args); Assert.assertEquals(args.app, service.getApp()); From c5e6dacfa499627a51805ab8515410933f93101c Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Thu, 25 Apr 2024 15:36:01 +0530 Subject: [PATCH 10/12] Update ApplicationTest.java --- splunk/src/test/java/com/splunk/ApplicationTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/splunk/src/test/java/com/splunk/ApplicationTest.java b/splunk/src/test/java/com/splunk/ApplicationTest.java index 4094d09f..293e5f81 100644 --- a/splunk/src/test/java/com/splunk/ApplicationTest.java +++ b/splunk/src/test/java/com/splunk/ApplicationTest.java @@ -32,7 +32,6 @@ public class ApplicationTest extends SDKTestCase { @Override public void setUp() throws Exception { super.setUp(); - removeTestApplications(); applicationName = createTemporaryName(); From c74ead06c3add82326df00bf9c3781d3421ddfb0 Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Thu, 25 Apr 2024 18:07:51 +0530 Subject: [PATCH 11/12] Updated GitHub action versions --- .github/workflows/release.yml | 
6 +++--- .github/workflows/test.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8d75e68e..e7eae23c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,9 +12,9 @@ jobs: name: Java SDK Release runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK 1.8 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: 17 distribution: oracle @@ -37,7 +37,7 @@ jobs: MAVEN_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }} - name: Upload Artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: java_sdk_docs path: splunk/target/apidocs diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5e61113a..125e5dab 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -35,15 +35,15 @@ jobs: - 10668:10668/udp steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up JDK - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: oracle java-version: 17 - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} From c2089b78ac94fcc6a4078a3b4e815a50d5c418ca Mon Sep 17 00:00:00 2001 From: Abhi Shah <abhis@splunk.com> Date: Thu, 25 Apr 2024 18:13:09 +0530 Subject: [PATCH 12/12] Update NamespaceTest.java --- splunk/src/test/java/com/splunk/NamespaceTest.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/splunk/src/test/java/com/splunk/NamespaceTest.java b/splunk/src/test/java/com/splunk/NamespaceTest.java index 40e88979..86f653e2 100644 --- a/splunk/src/test/java/com/splunk/NamespaceTest.java +++ b/splunk/src/test/java/com/splunk/NamespaceTest.java @@ -265,7 +265,9 @@ public void testNamespaceConflicts() { Assert.assertEquals(query2, 
service.getSavedSearches(namespace2).get(savedSearchName).getSearch()); service.getSavedSearches(wildcardNamespace).get(savedSearchName).getSearch(); - Assert.fail("Expected SplunkException about multiple keys."); + Assert.fail("Expected SplunkException about multiple keys not Throwns"); + } catch (SplunkException SE){ + Assert.assertNotNull(SE); } finally { if (service.getSavedSearches(namespace1).containsKey(savedSearchName)) { service.getSavedSearches(namespace1).remove(savedSearchName);