diff --git a/.classpath b/.classpath
index f2f736f..3c6f84a 100644
--- a/.classpath
+++ b/.classpath
@@ -1,6 +1,6 @@
-
+
@@ -8,6 +8,7 @@
+
diff --git a/.project b/.project
index dc5f95f..85c7b7c 100644
--- a/.project
+++ b/.project
@@ -20,4 +20,15 @@
 		<nature>org.eclipse.m2e.core.maven2Nature</nature>
 		<nature>org.eclipse.jdt.core.javanature</nature>
 	</natures>
+	<filteredResources>
+		<filter>
+			<id>1691634215480</id>
+			<name></name>
+			<type>30</type>
+			<matcher>
+				<id>org.eclipse.core.resources.regexFilterMatcher</id>
+				<arguments>node_modules|\.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
+			</matcher>
+		</filter>
+	</filteredResources>
 </projectDescription>
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
index 71df522..b56a5cf 100644
--- a/.settings/org.eclipse.jdt.core.prefs
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -1,9 +1,10 @@
 eclipse.preferences.version=1
 org.eclipse.jdt.core.compiler.codegen.methodParameters=generate
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8
-org.eclipse.jdt.core.compiler.compliance=1.8
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=11
+org.eclipse.jdt.core.compiler.compliance=11
 org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
 org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
 org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore
+org.eclipse.jdt.core.compiler.processAnnotations=enabled
 org.eclipse.jdt.core.compiler.release=disabled
-org.eclipse.jdt.core.compiler.source=1.8
+org.eclipse.jdt.core.compiler.source=11
diff --git a/README.md b/README.md
index a081e89..52b9ea8 100644
--- a/README.md
+++ b/README.md
@@ -42,12 +42,25 @@
 server.ssl.key-store-type=PKCS12
 server.ssl.key-store=classpath:my_keystore.p12
 server.ssl.key-store-password=my_keystore_password
 server.ssl.key-alias=my_alias
+#server.ssl.trust-store=classpath:my_truststore.p12
+#server.ssl.trust-store-password=my_truststore_password
 security.require-ssl=true
 spring.security.filter.order=5
-org.openeo.endpoint=https://my_openeo.url
-org.openeo.public.endpoint=https://my_openeo_public.url
+spring.jackson.serialization.write-dates-as-timestamps=false
+
+co.elasticsearch.endpoint=https://my_elastic:9200
+co.elasticsearch.service.name=openeo
+co.elasticsearch.service.node.name=elk_node_hostname
+co.elasticsearch.service.index.name=es_index_name
+co.elasticsearch.service.truststore.password=my_password
+co.elasticsearch.service.truststore.path=/path/to/elastic-certificates.p12
+co.elasticsearch.service.keystore.password=my_password
+co.elasticsearch.service.keystore.path=/path/to/http.p12
+co.elasticsearch.service.username=elastic_uname
+co.elasticsearch.service.password=my_password
+
 org.openeo.tmp.dir=tmp/
 org.openeo.tmp.file.expiry=60
 org.openeo.file.expiry=1
@@ -73,8 +86,11 @@
 org.openeo.odc.provider.url=http://www.open_data_cube_provider.url
 org.openeo.odc.provider.type=host
 org.openeo.odc.processes.list=classpath:processes_odc.json
 org.openeo.odc.collections.list=collections_odc.json
+org.openeo.endpoint=https://my_openeo.url
+org.openeo.public.endpoint=https://my_openeo_public.url
 org.openeo.udf.python.endpoint=http://my_openeo_python_udf_service.url
+org.openeo.udf.candela.endpoint=http://my_openeo_candela_service.url
 org.openeo.udf.r.endpoint=http://my_openeo_R_udf_service.url
 org.openeo.udf.dir=/my/udf/working/directory/
 org.openeo.udf.importscript=/my/udf/import/script/import_udf.sh
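As a quick sanity check of the `co.elasticsearch.*` properties above, the configured endpoint can be probed directly. A minimal sketch, assuming the placeholder host and credentials from the README (substitute your real values):

```bash
# Smoke-test the Elasticsearch endpoint configured in application.properties.
# -k skips certificate verification; drop it once the truststore is in place.
curl -k -u elastic_uname:my_password https://my_elastic:9200
```

A healthy node answers with a small JSON document containing the cluster name and version info.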
diff --git a/docs/elk.md b/docs/elk.md
new file mode 100644
index 0000000..0a7624f
--- /dev/null
+++ b/docs/elk.md
@@ -0,0 +1,332 @@
+# Quick introduction to the ELK stack
+The ELK Stack (E stands for Elasticsearch, L for Logstash and K for Kibana, its three main components) is a powerful data processing and visualization solution. It includes Elasticsearch for data storage and search, Logstash for data transformation, Filebeat for data collection, and Kibana for data visualization. This stack is widely used for real-time log and event data analysis.\
+Elasticsearch is accessible and queryable via HTTP APIs, and both queries and results are formatted as JSON documents.\
+Since ES is a search engine, every JSON document it returns is better known as a "hit".
+
+# Our environment
+For our openEO components (such as the Spring driver and the ODC driver) we need to set up a specific version of ELK with Filebeat in order to ingest log files into Elasticsearch.\
+Filebeat has to be installed on the client machines (the ones that generate the logs).\
+Logstash is best configured on the same machine as the ELK master node: it receives log entries from Filebeat over the network and parses them with grok regex patterns matching our log formats before ingesting them into Elasticsearch.
+
+## Network and nodes
+In our specific case, we are configuring an ELK stack made up of two distributed nodes:\
+eosao13 (10.8.244.14): hosts the major services and operates as the "master" for the Elasticsearch services\
+eosao14 (10.8.244.15): hosts only Elasticsearch, as a secondary node
+
+Due to our security policy, Elasticsearch operates only over HTTPS (with the respective SSL certificates).
+
+# Installation and configurations
+
+We send the openEO Spring driver and ODC driver logs (log4j multiline JSON and Python logging, respectively) through our ELK stack.
+
+Filebeat is configured to send logs to Logstash over the Beats protocol (TCP port 5044). Filebeat watches the specified files for new raw log entries (each newly appended line) and forwards them to Logstash on eosao13.
+
+## On eosao13
+
+### Installing and configuring elasticsearch
+
+Stop the Elasticsearch service if it is already installed and active; otherwise, if not installed:
+
+Add the Elasticsearch GPG key
+
+```bash
+wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo gpg --dearmor -o /usr/share/keyrings/elastic-keyring.gpg
+```
+
+Install required packages
+
+```bash
+sudo apt-get install apt-transport-https
+```
+
+Add the Elasticsearch repository
+```bash
+echo "deb [signed-by=/usr/share/keyrings/elastic-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | sudo tee -a /etc/apt/sources.list.d/elastic-8.x.list
+```
+
+Update the package list
+```bash
+sudo apt update
+```
+
+Install Elasticsearch
+```bash
+sudo apt install elasticsearch
+```
+
+Start the Elasticsearch service
+```bash
+sudo service elasticsearch start
+```
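+Before generating certificates, it can help to verify that the service actually came up. A quick check, assuming the default service name and port (use plain http:// if TLS is not enabled yet):
+
+```bash
+# Confirm the Elasticsearch service is running...
+sudo service elasticsearch status
+
+# ...and answering on the default port (with 8.x security auto-configuration
+# the node already speaks HTTPS, hence the -k).
+curl -k https://localhost:9200
+```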
+Generating p12 certificates
+
+1. from the /usr/share/elasticsearch folder, launch:
+```bash
+elasticsearch-certutil ca
+elasticsearch-certutil cert --ca elastic-stack-ca.p12
+```
+
+2. edit the /etc/elasticsearch/elasticsearch.yml conf file as follows:
+```
+# Network settings
+network.host: 0.0.0.0
+http.port: 9200
+transport.port: 9300
+http.host: 0.0.0.0
+
+# Paths
+path.data: /var/lib/elasticsearch
+path.logs: /var/log/elasticsearch
+
+# Cluster configuration
+cluster.name: openeo
+node.name: eosao13
+
+# SSL configuration
+xpack.security.enabled: true
+xpack.security.transport.ssl.enabled: true
+xpack.security.transport.ssl.verification_mode: certificate
+xpack.security.transport.ssl.keystore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+xpack.security.transport.ssl.truststore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+xpack.security.http.ssl.enabled: true
+xpack.security.http.ssl.keystore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+xpack.security.http.ssl.truststore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+
+# Other recommended settings
+discovery.seed_hosts: ["10.8.244.14", "10.8.244.15", "eosao14"]
+cluster.initial_master_nodes: ["eosao13"]
+```
+
+### Installing logstash
+
+```bash
+sudo apt-get update && sudo apt-get install logstash
+```
+
+Start the Logstash service
+```bash
+sudo service logstash start
+```
+
+### Editing logstash conf
+
+Edit /etc/logstash/conf.d/logstash.conf as follows:
+
+```
+input {
+  beats {
+    port => 5044
+    type => "beats"
+  }
+}
+
+filter {
+  if [fields][name] == "openeo-odc" {
+    grok {
+      match => { "message" => "(?m)%{TIMESTAMP_ISO8601:time}%{SPACE}%{UUID:job_id}%{SPACE}%{NOTSPACE}%{LOGLEVEL:level}%{NOTSPACE}%{SPACE}(?<message>(.|\n)*)" }
+      overwrite => [ "message" ]
+    }
+
+    multiline {
+      pattern => "^%{TIMESTAMP_ISO8601} "
+      negate => true
+      what => previous
+    }
+
+    date {
+      match => [ "time", "yyyy-MM-dd HH:mm:ss,SSS" ]
+      target => "time"
+      timezone => "Europe/Rome"
+      #target_timezone => "Etc/GMT"
+    }
+
+    mutate {
+      gsub => [
+        "time", " ", "T",
+        "time", ",", "."
+        #, "time", "Z", ""
+      ]
+      replace => { "time" => "%{time}" }
+    }
+
+    mutate {
+      add_field => { "caller" => "openeo_odc_driver" }
+    }
+  }
+
+  if [fields][name] == "openeo-spring" {
+
+    if [jobid] {
+      mutate {
+        rename => {
+          "[log.level]" => "level"
+          "[@timestamp]" => "time"
+          "[message]" => "msg"
+          "[jobid]" => "job_id"
+        }
+        add_field => { "caller" => "openeo_spring_driver" }
+      }
+    } else {
+      mutate {
+        rename => {
+          "[log.level]" => "level"
+          "[@timestamp]" => "time"
+          "[message]" => "msg"
+        }
+        add_field => { "caller" => "openeo_spring_driver" }
+      }
+    }
+
+    if ![msg] {
+      multiline {
+        pattern => "^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}"
+        negate => true
+        what => "previous"
+      }
+    }
+  }
+}
+
+output {
+  if [type] == "beats" {
+    elasticsearch {
+      index => "openeo_test"
+      hosts => ["https://localhost:9200"]
+      user => "elastic"
+      password => "e4PNwff4FgPHh+ksWpB9"
+
+      ssl => true
+      ssl_certificate_verification => false
+      ## FIXME implement
+      #cacert => "/path/to/ca.crt"
+      # Optional: Specify client-side SSL certificate and key
+      #ssl_certificate => "/path/to/client.crt"
+      #ssl_key => "/path/to/client.key"
+    }
+  }
+  stdout { codec => rubydebug }
+}
+```
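+With the pipeline file in place, Logstash can syntax-check it before a restart. A minimal sketch, assuming the default Debian/Ubuntu package layout:
+
+```bash
+# Validate the pipeline configuration without starting the service.
+sudo /usr/share/logstash/bin/logstash --path.settings /etc/logstash --config.test_and_exit
+
+# Reload the pipeline once the check passes.
+sudo service logstash restart
+```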
+## On eosao14
+
+### Installing and configuring elasticsearch
+
+Stop the Elasticsearch service if it is already installed and active; otherwise, if not installed:
+
+Repeat the same steps as for eosao13, then edit /etc/elasticsearch/elasticsearch.yml as follows:
+```
+# Network settings
+network.host: 0.0.0.0
+http.port: 9200
+transport.port: 9300
+http.host: 0.0.0.0
+
+# Paths
+path.data: /var/lib/elasticsearch
+path.logs: /var/log/elasticsearch
+
+# Cluster config
+cluster.name: openeo
+node.name: eosao14
+
+# SSL config
+xpack.security.enabled: true
+xpack.security.transport.ssl.enabled: true
+xpack.security.transport.ssl.verification_mode: certificate
+xpack.security.transport.ssl.keystore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+xpack.security.transport.ssl.truststore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+xpack.security.http.ssl.enabled: true
+xpack.security.http.ssl.keystore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+xpack.security.http.ssl.truststore.path: /etc/elasticsearch/certs/elastic-stack-ca.p12
+
+# Other configs
+discovery.seed_hosts: ["10.8.244.14", "10.8.244.15", "eosao13", "eosao14"]
+cluster.initial_master_nodes: ["eosao13"]
+```
+
+# Checking the ELK installation
+
+### Cluster health
+wget, curl or browse to https://<host>:9200/_cat/health to get some plain-text info about the cluster status and check that everything works fine on the cluster.
+
+### On both eosao13 and eosao14
+
+Test whether ES and LS are working. Restart their respective system services, if necessary.
+
+### On the client host (where the Spring and ODC drivers reside)
+
+#### Installing Filebeat
+Filebeat must run on the same machines where the logs reside; it sends the logs to Logstash.
+
+```bash
+sudo apt-get update && sudo apt-get install filebeat
+```
+```bash
+sudo systemctl enable filebeat
+```
+
+### Add the proper log configs in /etc/filebeat/filebeat.yml
+Based on your specific machine, following the Filebeat documentation, edit your default filebeat.yml file as follows:
+
+```yaml
+filebeat.inputs:
+
+# odc-driver input
+- type: log
+  id: openeo-odc # openeo-test-01
+
+  enabled: true
+
+  paths:
+    - /path/to/openeo_odc_driver/openeo_odc_driver/odc_backend.log
+  fields:
+    name: "openeo-odc"
+
+# spring-driver input
+- type: log
+  id: openeo-spring
+  enabled: true
+  paths:
+    - /path/to/openeo-spring-driver/logs/openeo_1.0.0.log
+  fields:
+    name: "openeo-spring"
+  json.keys_under_root: true
+```
+
+```yaml
+output.logstash:
+  hosts: ["X.X.X.X:5044"] # substitute the Xs with the real IP address
+```
+
+## Testing ELK log ingestion
+
+With all ELK components running and properly configured, you can easily test whether log ingestion works by:
+- making sure the component (Spring driver, ODC driver) is running and generating log entries in the file you specified
+- displaying your index's hits with a search on https://eosao-elk-host:9200/indexname/_search (see the example query at the end of this page)
+
+## Further notes & suggestions
+
+- Kibana is the default ELK dashboard and user interface, where you can view, graph, analyze and graphically edit your ES data. If you want to use this tool to make your tests and test queries faster and easier, you can follow the official documentation at https://www.elastic.co/guide/index.html to install Kibana and connect it to the ELK stack.
+- Kibana uses the same Elasticsearch SSL certs, easily configurable in the Kibana conf file (see the official doc).
+- Keep your ELK passwords safe and be sure to note down your p12 certificates' passwords (if you lose a p12 certificate password you have to regenerate the certificate and reconfigure the whole ELK stack, while recovering an ELK password, such as the Elasticsearch HTTP auth password, is easier).
+- When ingesting data into your ELK stack, a new index will be created according to your conf files if it doesn't already exist.
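+## Example: querying a job's logs
+
+A minimal sketch of the kind of query the openEO Spring driver issues against the index (the host, index name, credentials and job id below are placeholders; substitute your own):
+
+```bash
+# Fetch all hits whose job_id matches a given openEO batch job.
+# -k skips certificate verification; point curl at your CA instead in production.
+curl -k -u elastic_uname:my_password \
+  "https://eosao13:9200/es_index_name/_search" \
+  -H 'Content-Type: application/json' \
+  -d '{ "query": { "term": { "job_id.keyword": "<JOB_ID>" } } }'
+```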
diff --git a/src/main/java/org/openeo/spring/api/JobsApiController.java b/src/main/java/org/openeo/spring/api/JobsApiController.java
index a9f863d..5c62f0d 100644
--- a/src/main/java/org/openeo/spring/api/JobsApiController.java
+++ b/src/main/java/org/openeo/spring/api/JobsApiController.java
@@ -15,8 +15,10 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.security.KeyStore;
 import java.security.Principal;
 import java.time.OffsetDateTime;
+import java.util.Base64;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
@@ -25,6 +27,12 @@
 import java.util.UUID;
 
 import javax.annotation.PostConstruct;
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSession;
+import javax.net.ssl.TrustManagerFactory;
 import javax.swing.event.EventListenerList;
 import javax.validation.Valid;
 import javax.validation.constraints.Min;
@@ -36,6 +44,7 @@
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.keycloak.representations.AccessToken;
+import org.openapitools.jackson.nullable.JsonNullable;
 import org.openeo.spring.bearer.ITokenService;
 import org.openeo.spring.bearer.TokenUtil;
 import org.openeo.spring.components.JobScheduler;
@@ -115,6 +124,27 @@
     @Value("${co.elasticsearch.service.node.name}")
     private String serviceNodeName;
 
+    @Value("${co.elasticsearch.service.index.name}")
+    private String serviceIndexName;
+
+    @Value("${co.elasticsearch.service.truststore.password}")
+    private String trustStorePassword;
+
+    @Value("${co.elasticsearch.service.truststore.path}")
+    private String trustStorePath;
+
+    @Value("${co.elasticsearch.service.keystore.password}")
+    private String keyStorePassword;
+
+    @Value("${co.elasticsearch.service.keystore.path}")
+    private String keyStorePath;
+
+    @Value("${co.elasticsearch.service.username}")
+    private String elasticsearchServiceUsername;
+
+    @Value("${co.elasticsearch.service.password}")
+    private String elasticsearchServicePassword;
+
     @Autowired
     private JobScheduler jobScheduler;
 
@@ -379,77 +409,213 @@ public ResponseEntity debugJob(
     @Parameter(description = "The last identifier (property `id` of a log entry) the client has received. If provided, the back-end only sends the entries that occurred after the specified identifier. If not provided or empty, start with the first entry.") @Valid @RequestParam(value = "offset", required = false) String offset,
     @Min(1) @Parameter(description = "This parameter enables pagination for the endpoint and specifies the maximum number of elements that arrays in the top-level object (e.g. jobs or log entries) are allowed to contain. The only exception is the `links` array, which MUST NOT be paginated as otherwise the pagination links may be missing in responses. If the parameter is not provided or empty, all elements are returned. Pagination is OPTIONAL and back-ends and clients may not support it. Therefore it MUST be implemented in a way that clients not supporting pagination get all resources regardless. Back-ends not supporting pagination will return all resources. If the response is paginated, the links array MUST be used to propagate the links for pagination with pre-defined `rel` types. See the links array schema for supported `rel` types. *Note:* Implementations can use all kinds of pagination techniques, depending on what is supported best by their infrastructure. So it doesn't matter whether it is page-based, offset-based or uses tokens for pagination. The clients will use whatever is specified in the links with the corresponding `rel` types.") @Valid @RequestParam(value = "limit", required = false) Integer limit) {
     LogEntries logEntries = new LogEntries();
-    //TODO describe query
-    String elasticSearchQuery = "filebeat-7.13.3-2021.07.13-000001/_search";
+
+    // Constructing the ES query URL on the selected index
+    String elasticSearchQuery = serviceIndexName + "/_search?size=10000";
+
+    // FIXME: replace the "/_search?size=10000" query with proper ES hit-retrieving pagination
+    // (the "size"/"from" paging parameters or "search_after", as per the ES official documentation)
+
     try {
-        //TODO query elastic search endpoint here for all log information regarding a job queued for processing.
-        URL url = new URL(elasticSearchEndpoint + "/" + elasticSearchQuery);
-        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-        conn.setRequestMethod("GET");
-        conn.setDoOutput(true);
+
+        HttpURLConnection conn = null;
+        URL url = new URL(elasticSearchEndpoint + "/" + elasticSearchQuery);
+        int check_code = HttpURLConnection.HTTP_OK;
+
+        // check whether elasticSearchEndpoint is an HTTP or HTTPS URL
+        if (elasticSearchEndpoint.startsWith("https://")) {
+            HostnameVerifier customHostnameVerifier = new HostnameVerifier() {
+                @Override
+                public boolean verify(String hostname, SSLSession session) {
+                    // FIXME: every hostname is treated as "valid";
+                    // add hostname verification logic here if needed
+                    return true;
+                }
+            };
+
+            // FIXME: setting this custom HTTPS verifier (temporary)
+            HttpsURLConnection.setDefaultHostnameVerifier(customHostnameVerifier);
+
+            // Load the truststore (the configured stores are PKCS#12 files)
+            char[] trustStorePassword_ = trustStorePassword.toCharArray();
+            KeyStore trustStore = KeyStore.getInstance("PKCS12");
+            try (InputStream trustStoreInputStream = new FileInputStream(trustStorePath)) {
+                trustStore.load(trustStoreInputStream, trustStorePassword_);
+            }
+
+            // Create TrustManagerFactory
+            TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+            trustManagerFactory.init(trustStore);
+
+            // Load the keystore
+            char[] keyStorePassword_ = keyStorePassword.toCharArray();
+            KeyStore keyStore = KeyStore.getInstance("PKCS12");
+            try (InputStream keyStoreInputStream = new FileInputStream(keyStorePath)) {
+                keyStore.load(keyStoreInputStream, keyStorePassword_);
+            }
+
+            // Create KeyManagerFactory
+            KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
+            keyManagerFactory.init(keyStore, keyStorePassword_);
+
+            // Create SSLContext
+            SSLContext sslContext = SSLContext.getInstance("TLS");
+            sslContext.init(keyManagerFactory.getKeyManagers(), trustManagerFactory.getTrustManagers(), null);
+
+            // Open connection
+            conn = (HttpsURLConnection) url.openConnection();
+            ((HttpsURLConnection) conn).setSSLSocketFactory(sslContext.getSocketFactory());
+
+            check_code = HttpsURLConnection.HTTP_OK;
+        } else {
+            conn = (HttpURLConnection) url.openConnection();
+        }
+
+        // conn.setRequestMethod("GET");
+        conn.setDoOutput(true);
+
         conn.setRequestProperty("Content-Type", "application/json");
-        StringBuilder queryString = new StringBuilder();
-        //TODO insert elastic search parameters from application.properties through resource loader here
-        queryString.append("{");
-        queryString.append(" \"query\":{");
-        queryString.append(" \"bool\":{");
-        queryString.append(" \"filter\":[");
-        queryString.append(" {\"term\": {\"service.name\":\""+ serviceName +"\"}},");
{\"service.node.name\":\""+ serviceNodeName +"\"}},"); - queryString.append(" {\"term\": {\"jobid\":\"" + jobId + "\"}}"); - queryString.append(" ]"); - queryString.append(" }"); - queryString.append(" },"); - queryString.append(" \"fields\":["); - queryString.append(" \"@timestamp\","); - queryString.append(" \"message\","); - queryString.append(" \"log.level\","); - queryString.append(" \"log.logger\""); - queryString.append(" ],"); - queryString.append(" \"_source\": false"); - queryString.append("}"); + + // Set authentication credentials + String authCredentials = elasticsearchServiceUsername + ":" + elasticsearchServicePassword; + byte[] encodedAuth = Base64.getEncoder().encode(authCredentials.getBytes()); + String authHeaderValue = "Basic " + new String(encodedAuth); + conn.setRequestProperty("Authorization", authHeaderValue); + + + + // Build the query using a JSON object + JSONObject termObject = new JSONObject(); + termObject.put("job_id.keyword", jobId); + + JSONObject queryObject = new JSONObject(); + queryObject.put("term", termObject); + + JSONObject requestObject = new JSONObject(); + requestObject.put("query", queryObject); + requestObject.put("_source", true); + + OutputStreamWriter out = new OutputStreamWriter(conn.getOutputStream()); - out.write(queryString.toString()); + out.write(requestObject.toString()); out.close(); - InputStream errorStream = conn.getErrorStream(); - if (errorStream != null) { - BufferedReader reader = new BufferedReader(new InputStreamReader(errorStream)); - StringBuilder errorMessage = new StringBuilder(); - String line; - while ((line = reader.readLine()) != null) { - log.error(line); - errorMessage.append(line); - errorMessage.append(System.getProperty("line.separator")); + + + // Read response + int responseCode = conn.getResponseCode(); + if (responseCode == check_code) { + InputStream errorStream = conn.getErrorStream(); + if (errorStream != null) { + BufferedReader reader = new BufferedReader(new InputStreamReader(errorStream)); + StringBuilder errorMessage = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + log.error(line); + errorMessage.append(line); + errorMessage.append(System.getProperty("line.separator")); + } + log.error("An error when accessing logs from elastic stac: " + errorMessage.toString()); + Error error = new Error(); + error.setCode("500"); + error.setMessage("An error when accessing logs from elastic stac: " + errorMessage.toString()); + return new ResponseEntity(error, HttpStatus.INTERNAL_SERVER_ERROR); + + } else { + ByteArrayOutputStream result = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + for (int length; (length = conn.getInputStream().read(buffer)) != -1; ) { + result.write(buffer, 0, length); + } + log.trace(result.toString()); + //TODO create log result as json mappable object in domain model and map directly using annotations. + //automatic parsing as below is currently failing... 
+                JSONObject logResult = new JSONObject(result.toString());
+                JSONArray results = logResult.getJSONObject("hits").getJSONArray("hits");
+
+                int esTotalHits = logResult.getJSONObject("hits").getJSONObject("total").getInt("value");
+
+                if (esTotalHits == 0) {
+                    log.error("Wrong job ID or empty index: ES returned 0 hits");
+                    Error error = new Error();
+                    error.setCode("422");
+                    error.setMessage("Wrong job ID or empty index: ES returned 0 hits");
+                    return new ResponseEntity(error, HttpStatus.UNPROCESSABLE_ENTITY);
+                }
+
+                results.forEach(item -> {
+                    JSONObject logEntryItem = (JSONObject) item;
+                    JSONObject logInfoFields = logEntryItem.getJSONObject("_source"); // "fields"
+                    log.trace(logEntryItem.toString());
+                    log.trace(logInfoFields.toString());
+                    LogEntry logEntry = new LogEntry();
+
+                    // FIELDS
+
+                    // id
+                    logEntry.setId(logEntryItem.getString("_id"));
+
+                    // code
+                    logEntry.setCode("HTTP" + responseCode);
+
+                    // level
+                    //String logLevel = logInfoFields.getJSONArray("log.level").getString(0);
+                    String logLevel = logInfoFields.getString("level");
+                    logEntry.setLevel(LevelEnum.valueOf(logLevel.toUpperCase()));
+
+                    // message
+                    //logEntry.setMessage(logInfoFields.getJSONArray("message").getString(0));
+                    logEntry.setMessage(logInfoFields.getString("msg"));
+
+                    // time
+                    logEntry.setTime(logInfoFields.getString("time"));
+
+                    // data
+                    JsonNullable<Object> jsonNullableObject = null;
+                    logEntry.setData(jsonNullableObject);
+
+                    // path
+                    //LogEntryPath lep = new LogEntryPath(logInfoFields.getJSONObject("host").getString(""));
+                    //logEntry.addPathItem(logInfoFields.getJSONObject("log").getJSONObject("file").getString("path")); // "fields"
+
+                    // usage
+                    JsonNullable<Object> jsonNullableObject2 = null;
+                    logEntry.setUsage(jsonNullableObject2);
+
+                    // links
+
+                    //log.info(logEntryItem); // [for debug]
+
+                    logEntries.addLogsItem(logEntry);
+                });
             }
+        } else if (responseCode > 399 && responseCode < 600) {
+            String errorMessage = String.format("Error: %d response from the Elastic server.", responseCode);
             ResponseEntity response = ApiUtil.errorResponse(HttpStatus.INTERNAL_SERVER_ERROR,
                 String.format("An error occurred when accessing logs from the Elastic stack: %s", errorMessage));
             log.error(response.getBody());
             return response;
-        } else {
-            ByteArrayOutputStream result = new ByteArrayOutputStream();
-            byte[] buffer = new byte[1024];
-            for (int length; (length = conn.getInputStream().read(buffer)) != -1; ) {
-                result.write(buffer, 0, length);
-            }
-            log.trace(result.toString());
-            //TODO create log result as json mappable object in domain model and map directly using annotations.
-            //automatic parsing as below is currently failing...
-            JSONObject logResult = new JSONObject(result.toString());
-            JSONArray results = logResult.getJSONObject("hits").getJSONArray("hits");
-            results.forEach(item -> {
-                JSONObject logEntryItem = (JSONObject) item;
-                JSONObject logInfoFields = logEntryItem.getJSONObject("fields");
-                log.trace(logEntryItem.toString());
-                log.trace(logInfoFields.toString());
-                LogEntry logEntry = new LogEntry();
-                String logLevel = logInfoFields.getJSONArray("log.level").getString(0);
-                logEntry.setLevel(LevelEnum.valueOf(logLevel.toUpperCase()));
-                logEntry.setMessage(logInfoFields.getJSONArray("message").getString(0));
-                logEntry.setId(logEntryItem.getString("_id"));
-                logEntries.addLogsItem(logEntry);
-            });
         }
-    } catch(Exception e) {
+
+        conn.disconnect();
+
+    } catch(Exception e) {
+        // TODO: the rows below should be extracted into a separate method/function
         log.error("An error occurred when accessing logs from the Elastic stack: " + e.getMessage());
         StringBuilder builder = new StringBuilder(e.getMessage());
         for (StackTraceElement element : e.getStackTrace()) {
@@ -462,7 +628,6 @@
     }
     //TODO implement logEntry and add to logEntries list and return result with pagination links
     return new ResponseEntity(logEntries, HttpStatus.OK);
-
 }
 
 /**
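The `size=10000` cap flagged by the FIXME in `debugJob` above can be replaced with `search_after` pagination. For reference, a curl sketch of that approach (placeholders as before; it assumes `time` has a sortable keyword/date mapping):

```bash
# First page: sort deterministically and remember the last hit's sort values.
curl -k -u elastic_uname:my_password \
  "https://my_elastic:9200/es_index_name/_search" \
  -H 'Content-Type: application/json' \
  -d '{
        "size": 100,
        "query": { "term": { "job_id.keyword": "<JOB_ID>" } },
        "sort": [ { "time.keyword": "asc" }, "_doc" ]
      }'
# Next page: repeat the request with an extra
#   "search_after": [ "<last hit time>", <last hit _doc value> ]
# taken verbatim from the "sort" array of the previous page's last hit.
```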
diff --git a/src/main/java/org/openeo/spring/model/LogEntry.java b/src/main/java/org/openeo/spring/model/LogEntry.java
index fd98758..579a39e 100644
--- a/src/main/java/org/openeo/spring/model/LogEntry.java
+++ b/src/main/java/org/openeo/spring/model/LogEntry.java
@@ -21,10 +21,10 @@
  */
 @ApiModel(description = "A log message that communicates information about the processed data.")
 @javax.annotation.Generated(value = "org.openapitools.codegen.languages.SpringCodegen", date = "2020-07-02T08:45:00.334+02:00[Europe/Rome]")
 public class LogEntry {
 
     @JsonProperty("id")
     private String id;
 
     @JsonProperty("code")
     private String code;
@@ -41,9 +41,8 @@ public enum LevelEnum {
     TRACE("trace"),
 
     DEBUG("debug");
 
     private String value;
 
     LevelEnum(String value) {
@@ -76,18 +75,27 @@ public static LevelEnum fromValue(String value) {
     @JsonProperty("message")
     private String message;
 
+    @JsonProperty("time")
+    private String time;
+
     @JsonProperty("data")
     private JsonNullable<Object> data = JsonNullable.undefined();
 
     @JsonProperty("path")
     @Valid
     private List<LogEntryPath> path = new ArrayList<>();
 
+    @JsonProperty("usage")
+    private JsonNullable<Object> usage = JsonNullable.undefined();
+
     @JsonProperty("links")
     @Valid
     private List<Link> links = null;
 
     public LogEntry id(String id) {
         this.id = id;
         return this;
@@ -108,6 +116,14 @@ public String getId() {
     public void setId(String id) {
         this.id = id;
     }
 
+    /**
+     * TODO: define comment text about 'time'
+     * @return time
+     */
+    @ApiModelProperty(example = "2023-06-28 14:06:21,023", value = "define comment text about 'time'")
+    public String getTime() {
+        return time;
+    }
 
     public LogEntry code(String code) {
         this.code = code;
@@ -171,6 +187,12 @@ public void setMessage(String message) {
         this.message = message;
     }
 
+    public void setTime(String time) {
+        this.time = time;
+    }
+
     public LogEntry data(Object data) {
         this.data = JsonNullable.of(data);
         return this;
@@ -191,6 +213,9 @@ public void setData(JsonNullable<Object> data) {
         this.data = data;
     }
 
     public LogEntry path(List<LogEntryPath> path) {
         this.path = path;
         return this;
@@ -218,6 +243,38 @@ public void setPath(List<LogEntryPath> path) {
         this.path = path;
     }
 
+    public LogEntry usage(Object usage) {
+        this.usage = JsonNullable.of(usage);
+        return this;
+    }
+
+    /**
+     * TODO usage
+     * @return usage
+     */
+    @ApiModelProperty(value = "usage")
+    public JsonNullable<Object> getUsage() {
+        return usage;
+    }
+
+    public void setUsage(JsonNullable<Object> usage) {
+        this.usage = usage;
+    }
+
     public LogEntry links(List<Link> links) {
         this.links = links;
         return this;
@@ -248,6 +305,11 @@ public void setLinks(List<Link> links) {
     }
 
     @Override
     public boolean equals(java.lang.Object o) {
         if (this == o) {
@@ -261,27 +323,31 @@ public boolean equals(java.lang.Object o) {
             Objects.equals(this.code, logEntry.code) &&
             Objects.equals(this.level, logEntry.level) &&
             Objects.equals(this.message, logEntry.message) &&
+            Objects.equals(this.time, logEntry.time) &&
             Objects.equals(this.data, logEntry.data) &&
             Objects.equals(this.path, logEntry.path) &&
+            Objects.equals(this.usage, logEntry.usage) &&
             Objects.equals(this.links, logEntry.links);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(id, code, level, message, data, path, links);
+        return Objects.hash(id, code, level, message, time, data, path, usage, links);
     }
 
     @Override
     public String toString() {
         StringBuilder sb = new StringBuilder();
         sb.append("class LogEntry {\n");
         sb.append("    id: ").append(toIndentedString(id)).append("\n");
         sb.append("    code: ").append(toIndentedString(code)).append("\n");
         sb.append("    level: ").append(toIndentedString(level)).append("\n");
         sb.append("    message: ").append(toIndentedString(message)).append("\n");
+        sb.append("    time: ").append(toIndentedString(time)).append("\n");
         sb.append("    data: ").append(toIndentedString(data)).append("\n");
         sb.append("    path: ").append(toIndentedString(path)).append("\n");
+        sb.append("    usage: ").append(toIndentedString(usage)).append("\n");
         sb.append("    links: ").append(toIndentedString(links)).append("\n");
         sb.append("}");
         return sb.toString();
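For an end-to-end check of the reworked log retrieval, the openEO logs endpoint served by `debugJob` can be called directly. A sketch, assuming the placeholder public endpoint from the README and a valid bearer token:

```bash
# Request the log entries of a batch job; each entry now carries the added
# time and usage fields alongside id, code, level, message, path and links.
curl -H "Authorization: Bearer <TOKEN>" "https://my_openeo.url/jobs/<JOB_ID>/logs"
```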