revise listFiles
derrickoswald committed Sep 12, 2017
1 parent abb82c9 commit 6837ca0
Showing 7 changed files with 99 additions and 96 deletions.
CIMWeb/src/main/java/ch/ninecode/cim/cimweb/ApplicationConfig.java
@@ -39,8 +39,8 @@ public class ApplicationConfig extends Application
*/
public Set<Class<?>> getClasses ()
{
Class<?> pong = null;
try { pong = Class.forName ("ch.ninecode.cim.cimweb.Pong"); } catch (ClassNotFoundException e) { e.printStackTrace(); }
Class<?> ping = null;
try { ping = Class.forName ("ch.ninecode.cim.cimweb.Ping"); } catch (ClassNotFoundException e) { e.printStackTrace(); }
Class<?> list = null;
try { list = Class.forName ("ch.ninecode.cim.cimweb.ListFiles"); } catch (ClassNotFoundException e) { e.printStackTrace(); }
Class<?> spatial = null;
@@ -49,8 +49,7 @@ public Set<Class<?>> getClasses ()
(
Arrays.asList
(
Ping.class,
pong,
ping,
list,
SimpleRESTEJB.class,
EnergyConsumer.class,
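The hunk above replaces the reflectively loaded Pong with the new Scala Ping and drops the direct Ping.class reference to the deleted Java Ping, leaving everything to the Class.forName lookup. A minimal sketch of that idiom in Scala (the original file is Java; the helper and class name are illustrative only):

object ResourceLookup
{
    // The lookup happens at deployment time, so a missing class degrades to a logged
    // stack trace and a null entry in the registered class list, not a compile error.
    def lookup (name: String): Class[_] =
        try
            Class.forName (name)
        catch
        {
            case e: ClassNotFoundException =>
                e.printStackTrace ()
                null
        }
}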
24 changes: 0 additions & 24 deletions CIMWeb/src/main/java/ch/ninecode/cim/cimweb/Ping.java

This file was deleted.

21 changes: 16 additions & 5 deletions CIMWeb/src/main/scala/ch/ninecode/cim/cimweb/ListFiles.scala
@@ -3,6 +3,7 @@ package ch.ninecode.cim.cimweb
import java.util.logging.Logger
import javax.ejb.Stateless
import javax.json.JsonObject
import javax.json.Json
import javax.resource.ResourceException
import javax.ws.rs.core.MediaType
import javax.ws.rs.DefaultValue
@@ -12,6 +13,8 @@ import javax.ws.rs.Path
import javax.ws.rs.PathParam
import javax.ws.rs.Produces

import scala.collection.JavaConversions._

import ch.ninecode.cim.connector.CIMInteractionSpec
import ch.ninecode.cim.connector.CIMInteractionSpecImpl
import ch.ninecode.cim.connector.CIMMappedRecord
@@ -31,7 +34,7 @@ class ListFiles extends RESTful
listFiles ("", debug)

@GET
@Path ("{path}")
@Path ("{path:[^;]*}")
@Produces (Array (MediaType.APPLICATION_JSON))
def listFiles (
@PathParam ("path") path: String,
@@ -40,7 +43,7 @@
val ret = new RESTfulResult
val directory = if (path.startsWith ("/")) path else "/" + path
_Logger.info ("list %s".format (directory))
val function = ListFilesFunction (directory, debug.toBoolean)
val function = ListFilesFunction (directory, try { debug.toBoolean } catch { case _: Throwable => false })
val connection = getConnection (ret)
if (null != connection)
try
@@ -58,9 +61,17 @@
{
val record = output.asInstanceOf [CIMMappedRecord]
ret.setResult (record.get ("result").asInstanceOf [String])
val error: String = ret._Result.asInstanceOf[JsonObject].getString ("error")
if (null != error)
val response = ret._Result.asInstanceOf[JsonObject]
if (response.containsKey ("error"))
{
ret._Status = RESTful.FAIL
ret._Message = response.getString ("error")
val result = Json.createObjectBuilder
for (key <- response.keySet)
if (key != "error")
result.add (key, response.get (key))
ret.setResult (result.build)
}
}
}
catch
@@ -82,6 +93,6 @@

object ListFiles
{
val LOGGER_NAME: String = Pong.getClass.getName
val LOGGER_NAME: String = ListFiles.getClass.getName
val _Logger: Logger = Logger.getLogger (LOGGER_NAME)
}
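Two details in this hunk are easy to miss: the @Path ("{path:[^;]*}") template keeps matrix parameters such as ;debug=true out of the captured path, and the old getString ("error") check never worked as a null test because javax.json throws when the key is absent, hence the switch to containsKey and the re-wrapping of the remaining keys. A minimal sketch of the two defensive idioms used above, written against a plain JsonObject (the object and method names are illustrative only):

import javax.json.Json
import javax.json.JsonObject

import scala.collection.JavaConversions._
import scala.util.Try

object ListFilesHelpers
{
    // Parse an untrusted matrix-parameter string: anything that is not a valid
    // boolean literal becomes false, equivalent to the try/catch in listFiles above.
    def asBoolean (s: String): Boolean = Try (s.toBoolean).getOrElse (false)

    // Copy every key except "error" into a fresh object, as the error branch above does.
    def withoutError (response: JsonObject): JsonObject =
    {
        val builder = Json.createObjectBuilder
        for (key <- response.keySet)
            if (key != "error")
                builder.add (key, response.get (key))
        builder.build
    }
}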
CIMWeb/src/main/scala/ch/ninecode/cim/cimweb/ListFilesFunction.scala
@@ -1,20 +1,30 @@
package ch.ninecode.cim.cimweb

import java.io.{File, StringWriter}
import java.io.File
import java.io.StringWriter
import java.net.URI
import java.util
import java.util.{HashMap, Map}
import java.util.HashMap
import java.util.Map
import javax.json.Json
import javax.json.JsonStructure
import javax.json.JsonWriterFactory
import javax.json.stream.JsonGenerator
import javax.ws.rs.core.MediaType

import ch.ninecode.cim.connector.CIMFunction.Return
import scala.collection.JavaConversions._

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.hadoop.fs.FileStatus
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.fs.permission.FsPermission
import org.apache.hadoop.security.AccessControlException
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.Row
import org.apache.spark.sql.SparkSession

import ch.ninecode.cim.connector.CIMFunction.Return

case class ListFilesFunction (path: String, debug: Boolean) extends CIMWebFunction
{
@@ -71,47 +81,37 @@ case class ListFilesFunction (path: String, debug: Boolean) extends CIMWebFunction
// form the response
val response = Json.createObjectBuilder
response.add ("filesystem", uri.toString)
response.add ("root", root.toString)
val temp: String = root.toString
val prefix: String = if (path.endsWith ("/")) if (temp.endsWith ("/")) temp else temp + "/" else temp
response.add ("root", prefix)
if (debug)
{
val configuration = Json.createObjectBuilder
val i1: util.Iterator[Map.Entry[String, String]] = hdfs_configuration.iterator ()
while (i1.hasNext)
{
val pair: Map.Entry[String, String] = i1.next
val key: String = pair.getKey
val value: String = pair.getValue
configuration.add (key, value)
}
for (pair <- hdfs_configuration)
configuration.add (pair.getKey, pair.getValue)
response.add ("configuration", configuration)
val environment = Json.createObjectBuilder
val env: util.Map[String, String] = System.getenv
val i2: util.Iterator[String] = env.keySet.iterator ()
while (i2.hasNext)
{
val key = i2.next
val value: String = env.get (key)
environment.add (key, value)
}
response.add ("environment", environment)
}
// read the list of files
val files = Json.createArrayBuilder
try
{
val statuses: Array[FileStatus] = hdfs.listStatus (root)
val prefix: String = root.toString
for (fs <- statuses)
{
var path: String = fs.getPath.toString
if (path.startsWith (prefix))
path = path.substring (prefix.length)
val file = Json.createObjectBuilder
file.add ("path", path)
file.add ("length", fs.getLen)
val name: String = fs.getPath.toString
file.add ("path", if (name.startsWith (prefix)) name.substring (prefix.length) else name)
file.add ("size", fs.getLen)
file.add ("modification_time", fs.getModificationTime)
file.add ("access_time", fs.getAccessTime)
file.add ("group", fs.getGroup)
file.add ("owner", fs.getOwner)
val permission: FsPermission = fs.getPermission
file.add ("permission", permission.toString)
file.add ("replication", fs.getReplication)
file.add ("block_size", fs.getBlockSize)
file.add ("is_directory", fs.isDirectory)
file.add ("is_sym_link", fs.isSymlink)
files.add (file)
}
}
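Besides splitting the grouped imports and replacing the iterator/while loops with for comprehensions over JavaConversions, this hunk normalizes the reported root, renames the per-file "length" field to "size", and has each entry carry permission (hence the new FsPermission import), replication, block size and directory/symlink flags; AccessControlException is presumably imported so a listing on a protected directory can be reported rather than crash, though the corresponding catch block lies below the visible part of the hunk. A sketch of the root normalization above (names are illustrative): when the requested path ends with "/", the reported root and the prefix stripped from each file path also end with "/", so the returned entries are clean relative names.

object RootNormalization
{
    // Mirror of the prefix logic above: propagate the trailing slash of the request
    // onto the root that is reported back and used to relativize file paths.
    def reportedRoot (root: String, requested: String): String =
        if (requested.endsWith ("/"))
            if (root.endsWith ("/")) root else root + "/"
        else
            root
}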
49 changes: 49 additions & 0 deletions CIMWeb/src/main/scala/ch/ninecode/cim/cimweb/Ping.scala
@@ -0,0 +1,49 @@
package ch.ninecode.cim.cimweb

import java.util
import java.util.logging.Logger

import javax.ejb.Stateless
import javax.json.Json
import javax.ws.rs.core.MediaType
import javax.ws.rs.DefaultValue
import javax.ws.rs.GET
import javax.ws.rs.MatrixParam
import javax.ws.rs.Path
import javax.ws.rs.Produces

import scala.collection.JavaConversions._

@Stateless
@Path ("/ping")
class Ping extends RESTful
{
import Ping._

@GET
@Produces (Array (MediaType.APPLICATION_JSON))
def ping (@DefaultValue ("false") @MatrixParam ("debug") debug: String): String =
{
val date = new util.Date ().toString
_Logger.info ("ping @ %s".format (date))
val result = new RESTfulResult ("OK", date)
if (try { debug.toBoolean } catch { case _: Throwable => false })
{
val environment = Json.createObjectBuilder
val env: util.Map[String, String] = System.getenv
for (xx <- env)
environment.add (xx._1, xx._2)
val ret = Json.createObjectBuilder
ret.add ("environment", environment)
result.setResult (ret.build)
}

result.toString
}
}

object Ping
{
val LOGGER_NAME: String = Ping.getClass.getName
val _Logger: Logger = Logger.getLogger (LOGGER_NAME)
}
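The new Scala Ping replaces the deleted Ping.java and Pong.scala: it answers on /ping with the current date, and the optional debug matrix parameter adds the process environment to the result. Since debug is a matrix parameter it rides on the path segment itself, not the query string. A minimal client sketch using only the JDK; the host, port and context root are assumptions, only the trailing /ping and the ;debug=true form come from the annotations above:

import java.net.URL

import scala.io.Source

object PingExample extends App
{
    // Hypothetical deployment URL: adjust host, port and context root to the installation.
    val url = new URL ("http://localhost:8080/cimweb/cim/ping;debug=true")
    val body = Source.fromInputStream (url.openStream, "UTF-8").mkString
    println (body) // a JSON string produced by RESTfulResult.toString
}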
32 changes: 0 additions & 32 deletions CIMWeb/src/main/scala/ch/ninecode/cim/cimweb/Pong.scala

This file was deleted.

2 changes: 1 addition & 1 deletion pom.xml
@@ -22,7 +22,7 @@

<!-- Spark versions -->
<version.dependency.hadoop>2.7.3</version.dependency.hadoop>
<version.dependency.spark>2.1.1</version.dependency.spark>
<version.dependency.spark>2.2.0</version.dependency.spark>
<version.dependency.cimreader>2.11-2.1.1-2.2.0</version.dependency.cimreader>

<version.dependency.junit>4.12</version.dependency.junit>
