[ZEPPELIN-6038] Unification of the Logger variable (apache#4793)
* [ZEPPELIN-6038] Unify the Logger variable to LOGGER in all files

* [ZEPPELIN-6038] Set all logger constants to private static final

* [ZEPPELIN-6038] Adjust code character length

* [ZEPPELIN-6038] Refactor logger variable

* [ZEPPELIN-6038] Revert code for automatically generated files

* [ZEPPELIN-6038] Fix code syntax
ParkGyeongTae authored Aug 26, 2024
1 parent 364a556 commit 32f2a8e
Showing 98 changed files with 580 additions and 568 deletions.
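The convention the commit enforces is the standard SLF4J one-logger-per-class pattern. As a minimal sketch of the before/after shape (the class name here is hypothetical, not one of the files below):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleInterpreter {
  // Before: a mutable, package-visible, per-instance field.
  // Logger logger = LoggerFactory.getLogger(ExampleInterpreter.class);

  // After: one immutable logger shared by all instances of the class,
  // named in the uppercase style used for Java constants.
  private static final Logger LOGGER = LoggerFactory.getLogger(ExampleInterpreter.class);

  public void open() {
    LOGGER.info("Opening example interpreter");
  }
}

Declaring the field static final creates the logger once per class instead of once per object, and the private modifier keeps each class logging under its own name.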
AlluxioInterpreter.java
@@ -47,8 +47,8 @@
* Alluxio interpreter for Zeppelin.
*/
public class AlluxioInterpreter extends Interpreter {
- Logger logger = LoggerFactory.getLogger(AlluxioInterpreter.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(AlluxioInterpreter.class);

protected static final String ALLUXIO_MASTER_HOSTNAME = "alluxio.master.hostname";
protected static final String ALLUXIO_MASTER_PORT = "alluxio.master.port";
@@ -84,7 +84,7 @@ private Stream<String> filteredProperties(String prefix) {

@Override
public void open() {
- logger.info("Starting Alluxio shell to connect to " + alluxioMasterHostname +
+ LOGGER.info("Starting Alluxio shell to connect to " + alluxioMasterHostname +
" on port " + alluxioMasterPort);
// Setting the extra parameters being set in the interpreter config starting with alluxio
filteredProperties("alluxio.").forEach(x -> System.setProperty(x, properties.getProperty(x)));
@@ -98,11 +98,11 @@ public void open() {

@Override
public void close() {
- logger.info("Closing Alluxio shell");
+ LOGGER.info("Closing Alluxio shell");
try {
fs.close();
} catch (IOException e) {
- logger.error("Cannot close connection", e);
+ LOGGER.error("Cannot close connection", e);
}
}

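A side note on the log calls themselves, which this commit leaves untouched: some call sites build messages by string concatenation (as in the Alluxio hunks above), while others use SLF4J's {} placeholders (as in several BigQuery hunks below). A small sketch of the difference, with hypothetical names:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingStyles {
  private static final Logger LOGGER = LoggerFactory.getLogger(LoggingStyles.class);

  void connect(String host, int port) {
    // Concatenation: the message string is built even if INFO is disabled.
    LOGGER.info("Connecting to " + host + " on port " + port);

    // Parameterized: formatting is deferred until the level check passes.
    LOGGER.info("Connecting to {} on port {}", host, port);
  }
}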
BigQueryInterpreter.java
@@ -79,7 +79,7 @@
*
*/
public class BigQueryInterpreter extends Interpreter {
- private static Logger logger = LoggerFactory.getLogger(BigQueryInterpreter.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryInterpreter.class);
private static final char NEWLINE = '\n';
private static final char TAB = '\t';
private static Bigquery service = null;
@@ -117,9 +117,9 @@ public void open() {
try {
service = createAuthorizedClient();
exceptionOnConnect = null;
- logger.info("Opened BigQuery SQL Connection");
+ LOGGER.info("Opened BigQuery SQL Connection");
} catch (IOException e) {
- logger.error("Cannot open connection", e);
+ LOGGER.error("Cannot open connection", e);
exceptionOnConnect = e;
close();
}
@@ -243,7 +243,7 @@ private InterpreterResult executeSql(String sql) {
try {
pages = run(sql, projId, wTime, maxRows, useLegacySql);
} catch (IOException ex) {
- logger.error(ex.getMessage());
+ LOGGER.error(ex.getMessage());
return new InterpreterResult(Code.ERROR, ex.getMessage());
}
try {
@@ -261,7 +261,7 @@ public static Iterator<GetQueryResultsResponse> run(final String queryString,
final String projId, final long wTime, final long maxRows, Boolean useLegacySql)
throws IOException {
try {
- logger.info("Use legacy sql: {}", useLegacySql);
+ LOGGER.info("Use legacy sql: {}", useLegacySql);
QueryResponse query;
query = service
.jobs()
@@ -283,14 +283,14 @@ public static Iterator<GetQueryResultsResponse> run(final String queryString,

@Override
public void close() {
- logger.info("Close bqsql connection!");
+ LOGGER.info("Close bqsql connection!");

service = null;
}

@Override
public InterpreterResult interpret(String sql, InterpreterContext contextInterpreter) {
- logger.info("Run SQL command '{}'", sql);
+ LOGGER.info("Run SQL command '{}'", sql);
return executeSql(sql);
}

@@ -312,19 +312,19 @@ public int getProgress(InterpreterContext context) {

@Override
public void cancel(InterpreterContext context) {
- logger.info("Trying to Cancel current query statement.");
+ LOGGER.info("Trying to Cancel current query statement.");

if (service != null && jobId != null && projectId != null) {
try {
Bigquery.Jobs.Cancel request = service.jobs().cancel(projectId, jobId);
JobCancelResponse response = request.execute();
jobId = null;
- logger.info("Query Execution cancelled");
+ LOGGER.info("Query Execution cancelled");
} catch (IOException ex) {
- logger.error("Could not cancel the SQL execution");
+ LOGGER.error("Could not cancel the SQL execution");
}
} else {
- logger.info("Query Execution was already cancelled");
+ LOGGER.info("Query Execution was already cancelled");
}
}

ElasticsearchInterpreter.java
@@ -71,7 +71,7 @@
* Elasticsearch Interpreter for Zeppelin.
*/
public class ElasticsearchInterpreter extends Interpreter {
- private static Logger logger = LoggerFactory.getLogger(ElasticsearchInterpreter.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchInterpreter.class);

private static final String HELP = "Elasticsearch interpreter:\n"
+ "General format: <command> /<indices>/<types>/<id> <option> <JSON>\n"
@@ -114,7 +114,7 @@ public ElasticsearchInterpreter(Properties property) {

@Override
public void open() {
- logger.info("Properties: {}", getProperties());
+ LOGGER.info("Properties: {}", getProperties());

ElasticsearchClientType clientType =
ElasticsearchClientTypeBuilder
@@ -125,7 +125,7 @@ public void open() {
this.resultSize = Integer.parseInt(getProperty(ELASTICSEARCH_RESULT_SIZE));
} catch (final NumberFormatException e) {
this.resultSize = 10;
- logger.error("Unable to parse " + ELASTICSEARCH_RESULT_SIZE + " : " +
+ LOGGER.error("Unable to parse " + ELASTICSEARCH_RESULT_SIZE + " : " +
getProperty(ELASTICSEARCH_RESULT_SIZE), e);
}

@@ -135,10 +135,10 @@ public void open() {
} else if (clientType.isHttp()) {
elsClient = new HttpBasedClient(getProperties());
} else {
- logger.error("Unknown type of Elasticsearch client: " + clientType);
+ LOGGER.error("Unknown type of Elasticsearch client: " + clientType);
}
} catch (final IOException e) {
- logger.error("Open connection with Elasticsearch", e);
+ LOGGER.error("Open connection with Elasticsearch", e);
}
}

@@ -151,7 +151,7 @@ public void close() {

@Override
public InterpreterResult interpret(String cmd, InterpreterContext interpreterContext) {
- logger.info("Run Elasticsearch command '" + cmd + "'");
+ LOGGER.info("Run Elasticsearch command '" + cmd + "'");

if (StringUtils.isEmpty(cmd) || StringUtils.isEmpty(cmd.trim())) {
return new InterpreterResult(InterpreterResult.Code.SUCCESS);
@@ -451,7 +451,7 @@ private InterpreterResult buildAggResponseMessage(Aggregations aggregations) {
headerKeys.addAll(bucketMap.keySet());
buckets.add(bucketMap);
} catch (final IOException e) {
- logger.error("Processing bucket: " + e.getMessage(), e);
+ LOGGER.error("Processing bucket: " + e.getMessage(), e);
}
}

FileInterpreter.java
@@ -44,7 +44,7 @@
*
*/
public abstract class FileInterpreter extends Interpreter {
- Logger logger = LoggerFactory.getLogger(FileInterpreter.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(FileInterpreter.class);
String currentDir = null;
CommandArgs args = null;

@@ -108,13 +108,13 @@ protected String getNewPath(String argument){
// Handle the command handling uniformly across all file systems
@Override
public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
- logger.info("Run File command '" + cmd + "'");
+ LOGGER.info("Run File command '" + cmd + "'");

args = new CommandArgs(cmd);
args.parseArgs();

if (args.command == null) {
- logger.info("Error: No command");
+ LOGGER.info("Error: No command");
return new InterpreterResult(Code.ERROR, Type.TEXT, "No command");
}

@@ -133,7 +133,7 @@ public InterpreterResult interpret(String cmd, InterpreterContext contextInterpr
String results = listAll(newPath);
return new InterpreterResult(Code.SUCCESS, Type.TEXT, results);
} catch (Exception e) {
- logger.error("Error listing files in path " + newPath, e);
+ LOGGER.error("Error listing files in path " + newPath, e);
return new InterpreterResult(Code.ERROR, Type.TEXT, e.getMessage());
}

HDFSFileInterpreter.java
@@ -33,11 +33,14 @@
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

/**
* HDFS implementation of File interpreter for Zeppelin.
*/
public class HDFSFileInterpreter extends FileInterpreter {
+ private static final Logger LOGGER = LoggerFactory.getLogger(HDFSFileInterpreter.class);
static final String HDFS_URL = "hdfs.url";
static final String HDFS_USER = "hdfs.user";
static final String HDFS_MAXLENGTH = "hdfs.maxlength";
@@ -50,7 +53,7 @@ public void prepare() {
String userName = getProperty(HDFS_USER);
String hdfsUrl = getProperty(HDFS_URL);
int i = Integer.parseInt(getProperty(HDFS_MAXLENGTH));
- cmd = new HDFSCommand(hdfsUrl, userName, logger, i);
+ cmd = new HDFSCommand(hdfsUrl, userName, LOGGER, i);
gson = new Gson();
}

@@ -132,10 +135,10 @@ public class AllFileStatus {
private void testConnection() {
try {
if (isDirectory("/")) {
- logger.info("Successfully created WebHDFS connection");
+ LOGGER.info("Successfully created WebHDFS connection");
}
} catch (Exception e) {
- logger.error("testConnection: Cannot open WebHDFS connection. Bad URL: " + "/", e);
+ LOGGER.error("testConnection: Cannot open WebHDFS connection. Bad URL: " + "/", e);
exceptionOnConnect = e;
}
}
@@ -212,7 +215,7 @@ public String listFile(String filePath) {
return listOne(filePath, sfs.fileStatus);
}
} catch (Exception e) {
- logger.error("listFile: " + filePath, e);
+ LOGGER.error("listFile: " + filePath, e);
}
return "No such File or directory";
}
@@ -246,7 +249,7 @@ public String listAll(String path) throws InterpreterException {
return listFile(path);
}
} catch (Exception e) {
- logger.error("listall: listDir " + path, e);
+ LOGGER.error("listall: listDir " + path, e);
throw new InterpreterException("Could not find file or directory:\t" + path);
}
}
@@ -264,7 +267,7 @@ public boolean isDirectory(String path) {
return sfs.fileStatus.type.equals("DIRECTORY");
}
} catch (Exception e) {
- logger.error("IsDirectory: " + path, e);
+ LOGGER.error("IsDirectory: " + path, e);
return false;
}
return ret;
@@ -273,7 +276,7 @@ public boolean isDirectory(String path) {
@Override
public List<InterpreterCompletion> completion(String buf, int cursor,
InterpreterContext interpreterContext) {
- logger.info("Completion request at position\t" + cursor + " in string " + buf);
+ LOGGER.info("Completion request at position\t" + cursor + " in string " + buf);
final List<InterpreterCompletion> suggestions = new ArrayList<>();
if (StringUtils.isEmpty(buf)) {
suggestions.add(new InterpreterCompletion("ls", "ls", CompletionType.command.name()));
@@ -337,11 +340,11 @@ public List<InterpreterCompletion> completion(String buf, int cursor,
}
}
} catch (Exception e) {
- logger.error("listall: listDir " + globalPath, e);
+ LOGGER.error("listall: listDir " + globalPath, e);
return null;
}
} else {
- logger.info("path is not a directory. No values suggested.");
+ LOGGER.info("path is not a directory. No values suggested.");
}

//Error in string.
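The HDFSFileInterpreter hunks above also show the knock-on effect of the visibility change: its parent class FileInterpreter previously exposed a package-visible logger field that the subclass reused, so once that field became private static final the subclass had to declare its own LOGGER and add the SLF4J imports. A minimal sketch of that pattern, with hypothetical class names:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

abstract class BaseInterpreter {
  // Private: subclasses can no longer reach this logger.
  private static final Logger LOGGER = LoggerFactory.getLogger(BaseInterpreter.class);
}

class ChildInterpreter extends BaseInterpreter {
  // Each subclass declares its own logger, so log records are
  // attributed to the subclass rather than to the parent class.
  private static final Logger LOGGER = LoggerFactory.getLogger(ChildInterpreter.class);
}

The next hunks apply the same substitution in the mock classes used by the interpreter's tests.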
@@ -24,6 +24,7 @@
import com.google.gson.Gson;

import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.HashMap;
@@ -274,12 +275,14 @@ public String runCommand(Op op, String path, Arg[] args) throws Exception {
* Mock Interpreter - uses Mock HDFS command.
*/
class MockHDFSFileInterpreter extends HDFSFileInterpreter {
+ private static final Logger LOGGER = LoggerFactory.getLogger(MockHDFSFileInterpreter.class);
+
@Override
public void prepare() {
// Run commands against mock File System instead of WebHDFS
int i = Integer.parseInt(getProperty(HDFS_MAXLENGTH) == null ? "1000"
: getProperty(HDFS_MAXLENGTH));
- cmd = new MockHDFSCommand("", "", logger, i);
+ cmd = new MockHDFSCommand("", "", LOGGER, i);
gson = new Gson();
}

FlinkSqlInterpreter.java
@@ -26,7 +26,7 @@

public abstract class FlinkSqlInterpreter extends AbstractInterpreter {

- protected static final Logger LOGGER = LoggerFactory.getLogger(FlinkSqlInterpreter.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(FlinkSqlInterpreter.class);

protected FlinkInterpreter flinkInterpreter;
protected FlinkShims flinkShims;
AbstractStreamSqlJob.java
@@ -55,7 +55,7 @@
*
*/
public abstract class AbstractStreamSqlJob {
- private static Logger LOGGER = LoggerFactory.getLogger(AbstractStreamSqlJob.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(AbstractStreamSqlJob.class);

private static AtomicInteger SQL_INDEX = new AtomicInteger(0);
protected StreamExecutionEnvironment senv;
AppendStreamSqlJob.java
@@ -38,7 +38,7 @@

public class AppendStreamSqlJob extends AbstractStreamSqlJob {

- private static Logger LOGGER = LoggerFactory.getLogger(UpdateStreamSqlJob.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(UpdateStreamSqlJob.class);

private List<Row> materializedTable = new ArrayList<>();
private long tsWindowThreshold;
FlinkSqlInterpreterTest.java
@@ -71,7 +71,7 @@
@RunWith(FlinkStandaloneHiveRunner.class)
public abstract class FlinkSqlInterpreterTest {

- protected static final Logger LOGGER = LoggerFactory.getLogger(FlinkSqlInterpreterTest.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(FlinkSqlInterpreterTest.class);


protected FlinkInterpreter flinkInterpreter;
FlinkStreamSqlInterpreterTest.java
@@ -28,6 +28,8 @@
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessage;
import org.junit.Test;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.io.File;
@@ -41,7 +43,7 @@
import static org.junit.Assert.assertEquals;

public class FlinkStreamSqlInterpreterTest extends FlinkSqlInterpreterTest {
-
+ private static final Logger LOGGER = LoggerFactory.getLogger(FlinkStreamSqlInterpreterTest.class);

private static class FlinkJobListener implements JobListener {

FlinkVersion.java
@@ -21,7 +21,7 @@


public class FlinkVersion {
- private static final Logger logger = LoggerFactory.getLogger(FlinkVersion.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(FlinkVersion.class);

private int majorVersion;
private int minorVersion;
@@ -50,7 +50,7 @@ public FlinkVersion(String versionString) {
this.version = Integer.parseInt(String.format("%d%02d%02d",
majorVersion, minorVersion, patchVersion));
} catch (Exception e) {
- logger.error("Can not recognize Flink version " + versionString +
+ LOGGER.error("Can not recognize Flink version " + versionString +
". Assume it's a future release", e);
}
}
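Beyond the rename, the constructor shown here packs major, minor, and patch into a single comparable integer with String.format("%d%02d%02d", ...). A worked example with hypothetical values:

public class FlinkVersionExample {
  public static void main(String[] args) {
    // 1.15.2 packs to "1" + "15" + "02" = "11502", so releases
    // compare numerically: 11502 (1.15.2) > 10900 (1.9.0) > 10810 (1.8.10).
    int version = Integer.parseInt(String.format("%d%02d%02d", 1, 15, 2));
    System.out.println(version); // prints 11502
  }
}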
