HDDS-10697. Add Page to stream Recon logs #7253

Draft · wants to merge 9 commits into master
ReconConstants.java
@@ -90,4 +90,14 @@ private ReconConstants() {
(double) MAX_CONTAINER_SIZE_UPPER_BOUND /
MIN_CONTAINER_SIZE_UPPER_BOUND) /
Math.log(2)) + 1;

// Log API constants
public static final String RECON_LOG_OFFSET = "offset";
public static final String DEFAULT_RECON_LOG_OFFSET = "0";

public static final String RECON_LOG_LINES = "num";
public static final String DEFAULT_RECON_LOG_LINES = "100";

public static final String RECON_LOG_DIRECTION = "direction";
public static final String DEFAULT_RECON_LOG_DIRECTION = "NEUTRAL";
}
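
Reviewer note: these constants name the query parameters of the new /log/read endpoint added below, so a request with no parameters is equivalent to offset=0, num=100, direction=NEUTRAL.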
LogEndpoint.java
@@ -0,0 +1,128 @@
package org.apache.hadoop.ozone.recon.api;

import org.apache.hadoop.ozone.recon.api.types.ResponseStatus;
import org.apache.hadoop.ozone.recon.logging.LogFetcher;
import org.apache.hadoop.ozone.recon.logging.LogFetcherImpl;
import org.apache.hadoop.ozone.recon.logging.LogFileEmptyException;
import org.apache.hadoop.ozone.recon.logging.LogModels.LoggerResponse;

import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;


import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.ParseException;
import java.util.Arrays;

import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_LOG_OFFSET;
import static org.apache.hadoop.ozone.recon.ReconConstants.DEFAULT_RECON_LOG_OFFSET;
import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_LOG_LINES;
import static org.apache.hadoop.ozone.recon.ReconConstants.DEFAULT_RECON_LOG_LINES;
import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_LOG_DIRECTION;
import static org.apache.hadoop.ozone.recon.ReconConstants.DEFAULT_RECON_LOG_DIRECTION;


@Path("/log")
@Produces(MediaType.APPLICATION_JSON)
public class LogEndpoint {

private final String reconLogFileLoc;

private final LogFetcherImpl logFetcher;

public LogEndpoint() {
String logDir = System.getProperty("hadoop.log.dir");
reconLogFileLoc = logDir + "/ozone-recon.log";
logFetcher = new LogFetcherImpl();
}

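/**
* Fetches the default number of most recent log lines,
* starting from the end of the log file.
*
* @return {@link Response} containing the most recent log events
*/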
@GET
@Path("/read")
public Response getLogLines() throws IOException {
LoggerResponse.Builder respBuilder;
try {
logFetcher.initializeReader(reconLogFileLoc);
respBuilder = logFetcher.getLogs(Integer.parseInt(DEFAULT_RECON_LOG_LINES));
} catch (ParseException pe) {
return Response.serverError()
.entity("Unable to parse timestamp for log: \n" + Arrays.toString(pe.getStackTrace()))
.build();
} catch (IOException ie) {
return Response.serverError()
.entity("Unable to open log file: \n" + Arrays.toString(ie.getStackTrace()))
.build();
} catch (NullPointerException npe) {
return Response.serverError()
.entity("NullPointerException: \n" + Arrays.toString(npe.getStackTrace()))
.build();
} catch (LogFileEmptyException e) {
return Response.status(425)
.entity(e.getMessage())
.build();
} finally {
logFetcher.close();
}
return Response.ok(
respBuilder.setStatus(ResponseStatus.OK).build()
).build();
}

/**
* Fetches a batch of log lines starting from the given offset.
*
* @param offset Stores the offset of the last log line that was read
* @param lines Stores the number of lines to fetch from the log
* @param direction Stores the direction in which to fetch the logs,
* i.e. whether to fetch the next lines or the previous lines.
*
* @return {@link Response} containing the requested log events
*/
@POST
@Path("/read")
public Response getLogLines(
@DefaultValue(DEFAULT_RECON_LOG_OFFSET) @QueryParam(RECON_LOG_OFFSET)
int offset,
@DefaultValue(DEFAULT_RECON_LOG_LINES) @QueryParam(RECON_LOG_LINES)
int lines,
@DefaultValue(DEFAULT_RECON_LOG_DIRECTION) @QueryParam(RECON_LOG_DIRECTION)
LogFetcher.Direction direction
) throws IOException {
LoggerResponse.Builder respBuilder;
try {
logFetcher.initializeReader(reconLogFileLoc);
respBuilder = logFetcher.getLogs(offset, direction, lines);
} catch (ParseException pe) {
return Response.serverError()
.entity("Unable to parse timestamp for log: \n" + Arrays.toString(pe.getStackTrace()))
.build();
} catch (FileNotFoundException fe) {
return Response.serverError()
.entity("Unable to find log file: \n" + Arrays.toString(fe.getStackTrace()))
.build();
} catch (NullPointerException npe) {
return Response.serverError()
.entity("NullPointerException: \n" + Arrays.toString(npe.getStackTrace()))
.build();
} catch (LogFileEmptyException e) {
return Response.status(425)
.entity(e.getMessage())
.build();
} finally {
logFetcher.close();
}

return Response.ok(
respBuilder.setStatus(ResponseStatus.OK).build()
).build();
}
}
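
Reviewer note: for anyone who wants to exercise the new endpoint by hand, below is a minimal client sketch using the standard JAX-RS client API. The base URL (host, port, and /api/v1 prefix) is an assumption for illustration, not something this patch defines.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class LogEndpointClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    // POST /log/read?offset=120&num=50&direction=FORWARD fetches the
    // 50 log events starting at offset 120, reading forward.
    // The base URL below is assumed for illustration.
    Response response = client.target("http://localhost:9888/api/v1")
        .path("log/read")
        .queryParam("offset", 120)
        .queryParam("num", 50)
        .queryParam("direction", "FORWARD")
        .request()
        .post(Entity.text(""));
    System.out.println(response.readEntity(String.class));
    client.close();
  }
}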
LogFetcher.java
@@ -0,0 +1,53 @@
package org.apache.hadoop.ozone.recon.logging;

import org.apache.hadoop.ozone.recon.logging.LogModels.LoggerResponse;

import java.io.IOException;
import java.text.ParseException;

public interface LogFetcher {

enum Direction {
FORWARD,
REVERSE,
NEUTRAL
}

/**
* Return the log lines from offset in the provided direction
*
* @param offset The offset of the log line
* @param direction The direction towards which we fetch
* log data
* @param events The number of log events to fetch
* @return A {@link LoggerResponse.Builder} holding the fetched log events
*
* @throws IOException in case something goes wrong during file I/O operations
* @throws ParseException in case of error during parsing of event timestamp
* @throws LogFileEmptyException in case the log file is empty
*/
LoggerResponse.Builder getLogs(long offset, Direction direction, int events)
throws IOException, ParseException, LogFileEmptyException;

/**
* TODO:
* Return the log lines from timestamp in the provided direction
*
* @param timestamp The timestamp at which we need to read logs
* @param direction The direction towards which we fetch
* log data
* @param lines The number of log lines to fetch
* @return A JSON formatted string of log events
*/
// LoggerResponse.Builder getLogs(String timestamp, Direction direction, int lines);

/**
* TODO:
* Search through the logs for log events.
*
* @param filters The filters to use while searching.
* @param accumulator Accumulate the search results.
* @throws java.io.IOException if something goes wrong.
* @return A JSON string of the search results
*/
// LoggerResponse.Builder searchLogs(LogFilters filters, LogEventAccumulator accumulator) throws IOException;
}
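
Reviewer note: to make the paging contract concrete, here is a sketch of how a caller might page backwards through the log with this interface. The log file path and the anchor offset are illustrative, not part of this patch.

import org.apache.hadoop.ozone.recon.logging.LogFetcher.Direction;
import org.apache.hadoop.ozone.recon.logging.LogFetcherImpl;
import org.apache.hadoop.ozone.recon.logging.LogModels.LoggerResponse;

public class LogPagingSketch {
  public static void main(String[] args) throws Exception {
    LogFetcherImpl fetcher = new LogFetcherImpl();
    try {
      // The log file path is illustrative.
      fetcher.initializeReader("/var/log/ozone/ozone-recon.log");
      // Initial fetch: the 100 most recent events, from the end of the file.
      LoggerResponse.Builder latest = fetcher.getLogs(100);
      // Older page: the 100 events at and before a known offset, e.g. the
      // first offset returned by the previous call (4096 is illustrative).
      LoggerResponse.Builder older =
          fetcher.getLogs(4096L, Direction.REVERSE, 100);
    } finally {
      fetcher.close();
    }
  }
}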
LogFetcherImpl.java
@@ -0,0 +1,162 @@
package org.apache.hadoop.ozone.recon.logging;

import org.apache.hadoop.ozone.recon.logging.LogModels.LogEvent;
import org.apache.hadoop.ozone.recon.logging.LogModels.LoggerResponse;
import org.apache.hadoop.ozone.recon.logging.LogReaders.LogEventReader;

import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Deque;
import java.util.LinkedList;
import java.util.NoSuchElementException;

/**
* This class implements the API call actions for the /log endpoint.
* New data may have been appended to the file between calls, so we open
* the reader in each method and close it again at the end.
*/
public class LogFetcherImpl implements LogFetcher {

private final LogEventReader logEventReader;

public LogFetcherImpl() {
logEventReader = new LogEventReader();
}

/**
* Checks whether the provided event is null or any of its fields are null.
* @param event Stores the event to be checked
* @return true if the event or any of its fields are null, else false
*/
private boolean checkEventIsNull(LogEvent event) {
return (null == event || null == event.getTimestamp()
|| null == event.getSource()
|| null == event.getLevel()
|| null == event.getMessage());
}

/**
* Method to initialize the reader with the location of the file
* @param location Stores the location of the file
* @throws IOException if something went wrong during I/O operations
* @throws LogFileEmptyException if the log file is empty
*/
public void initializeReader(String location) throws IOException, LogFileEmptyException {
logEventReader.initializeReader(location);
}

/**
* Get the logs from a given offset
* @param offset The offset of the log line
* @param direction The direction towards which we fetch
* log data
* @param events The number of log events to fetch
* @return {@link LoggerResponse.Builder} instance of the events
* @throws IOException in case of error in I/O operations
* @throws ParseException in case of error while parsing log event timestamp
* @throws NullPointerException if unable to fetch events
*/
@Override
public LoggerResponse.Builder getLogs(long offset, Direction direction, int events)
throws IOException, ParseException, NullPointerException {

// Fetch the events
Deque<LogEvent> logEventDeque = new LinkedList<>();
// Fetch the event at offset
logEventDeque.add(logEventReader.getEventAt(offset));

for (int idx = 1; idx < events; idx++) {
LogEvent event = null;
if (Direction.FORWARD == direction) {
event = logEventReader.getNextEvent();
// Did not find any event so assume end of events
// Or if the event fields are null, do not add it
if (checkEventIsNull(event)) {
break;
}
logEventDeque.add(event);
}

if (Direction.REVERSE == direction) {
event = logEventReader.getPrevEvent();
// Did not find any event so assume end of events
// Or if the event fields are null, do not add it
if (checkEventIsNull(event)) {
break;
}
logEventDeque.addFirst(event);
}
}

long firstEventOffset;
long lastEventOffset;
try {
// getFirst/getLast throw NoSuchElementException if no events were found
firstEventOffset = logEventDeque.getFirst().getOffset();
lastEventOffset = logEventDeque.getLast().getOffset();
} catch (NoSuchElementException ne) {
firstEventOffset = -1;
lastEventOffset = -1;
}

return LoggerResponse.newBuilder()
.setLogs(new ArrayList<>(logEventDeque))
.setFirstOffset(firstEventOffset)
.setLastOffset(lastEventOffset);
}

/**
* Get the last {@code events} events from the log.
* This is the default implementation for the initial fetch of data.
* We start from the end of the logfile, at the most recent event.
* @param events Stores the number of events to get
* @return {@link LoggerResponse.Builder} instance of the events from the end
* @throws IOException if some I/O operation gave error
* @throws ParseException if unable to parse date/time
* @throws NullPointerException if unable to fetch events
*/
public LoggerResponse.Builder getLogs(int events)
throws IOException, ParseException, NullPointerException, LogFileEmptyException {

Deque<LogEvent> logEventDeque = new LinkedList<>();
LogEvent le = logEventReader.getLastEvent();

if (!checkEventIsNull(le)) {
logEventDeque.add(le);
}

for (int idx = 1; idx < events; idx++) {
LogEvent event = logEventReader.getPrevEvent();

// Did not find any event so assume end of events
// Or if the fields are empty for some reason, do not add the event
if (checkEventIsNull(event)) {
break;
}
// Since we are reading in reverse, we add each event before the current one
logEventDeque.addFirst(event);
}

long firstEventOffset;
long lastEventOffset;
try {
// getFirst/getLast throw NoSuchElementException if no events were found
firstEventOffset = logEventDeque.getFirst().getOffset();
lastEventOffset = logEventDeque.getLast().getOffset();
} catch (NoSuchElementException ne) {
firstEventOffset = -1;
lastEventOffset = -1;
}

return LoggerResponse.newBuilder()
.setLogs(new ArrayList<>(logEventDeque))
.setFirstOffset(firstEventOffset)
.setLastOffset(lastEventOffset);
}

public void close() throws IOException {
logEventReader.close();
}

}
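
Review suggestion: both getLogs overloads end with the same first/last offset extraction and builder call. A private helper along these lines (the name is a suggestion, not in the patch) would remove the duplication:

private LoggerResponse.Builder buildResponse(Deque<LogEvent> logEventDeque) {
  long firstEventOffset = -1;
  long lastEventOffset = -1;
  try {
    firstEventOffset = logEventDeque.getFirst().getOffset();
    lastEventOffset = logEventDeque.getLast().getOffset();
  } catch (NoSuchElementException ne) {
    // No events were found; keep the -1 sentinels.
  }
  return LoggerResponse.newBuilder()
      .setLogs(new ArrayList<>(logEventDeque))
      .setFirstOffset(firstEventOffset)
      .setLastOffset(lastEventOffset);
}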
LogFileEmptyException.java
@@ -0,0 +1,16 @@
package org.apache.hadoop.ozone.recon.logging;

/**
* This exception indicates that the logfile being read
* is not yet populated, i.e. has a size of 0.
*/

public class LogFileEmptyException extends Exception {
/**
* Constructs a {@code LogFileEmptyException} with {@code Logfile is not yet populated}
* as its error detail message.
*/
public LogFileEmptyException() {
super("Logfile is not yet populated");
}
}